Main update

This commit is contained in:
Gaulthier Gain 2022-09-09 11:43:03 +02:00
parent 4ae8d8e892
commit 4039686867
42 changed files with 2525 additions and 540 deletions

28
srcs/alignertool/args.go Normal file
View file

@ -0,0 +1,28 @@
// Copyright 2019 The UNICORE Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file
//
// Author: Gaulthier Gain <gaulthier.gain@uliege.be>
package alignertool
import (
"github.com/akamensky/argparse"
"os"
u "tools/srcs/common"
)
const (
rootArg = "root"
)
// parseLocalArguments parses the local arguments of the aligner tool.
//
// It registers the "--root" (-r) string option on the given parser and
// then parses os.Args through the common wrapper. It returns an error
// if parsing fails, otherwise it returns nil.
func parseLocalArguments(p *argparse.Parser, args *u.Arguments) error {
	// Root folder that contains one sub-directory per unikernel to align.
	args.InitArgParse(p, args, u.STRING, "r", rootArg,
		&argparse.Options{Required: false, Help: "The root folder which contains unikernels directories "})
	return u.ParserWrapper(p, os.Args)
}

View file

@ -0,0 +1,83 @@
// Copyright 2019 The UNICORE Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file
//
// Author: Gaulthier Gain <gaulthier.gain@uliege.be>
package alignertool
import (
"errors"
"os"
"path/filepath"
"tools/srcs/alignertool/ukManager"
"tools/srcs/binarytool/elf64analyser"
u "tools/srcs/common"
)
// RunAligner allows to run the aligner tool (which is out of the
// UNICORE toolchain).
func RunAligner(homeDir string) {

	// Initialise and parse the tool-specific command-line arguments.
	arguments := new(u.Arguments)
	parser, err := arguments.InitArguments("--aligner",
		"The Aligner tool allows to align several unikernels for memory deduplication")
	if err != nil {
		u.PrintErr(err)
	}
	if err := parseLocalArguments(parser, arguments); err != nil {
		u.PrintErr(err)
	}

	// The manager aggregates the micro-libs of every unikernel to align.
	manager := new(ukManager.Manager)
	manager.MicroLibs = make(map[string]*ukManager.MicroLib)

	rootFolder := *arguments.StringArg[rootArg]
	if len(rootFolder) > 0 {
		// Collect one unikernel per sub-directory of the root folder.
		manager.Unikernels = make([]*ukManager.Unikernel, 0)
		entries, err := os.ReadDir(rootFolder)
		if err != nil {
			u.PrintErr(err)
		}
		for _, entry := range entries {
			if !entry.IsDir() {
				continue
			}
			ukPath := filepath.Join(rootFolder, entry.Name())
			// Prefer the "build" sub-folder when it exists.
			if _, err := os.Stat(filepath.Join(ukPath, "build")); !os.IsNotExist(err) {
				ukPath = filepath.Join(ukPath, "build")
			}
			manager.Unikernels = append(manager.Unikernels,
				&ukManager.Unikernel{BuildPath: ukPath})
		}
	} else {
		u.PrintErr(errors.New("argument(s) must be provided"))
	}

	for _, uk := range manager.Unikernels {
		uk.Analyser = new(elf64analyser.ElfAnalyser)
		if len(uk.BuildPath) > 0 {
			// Normalise the build path with a trailing separator.
			if uk.BuildPath[len(uk.BuildPath)-1] != os.PathSeparator {
				uk.BuildPath += u.SEP
			}
			if err := uk.GetFiles(); err != nil {
				u.PrintErr(err)
			}
			// Inspect the micro-libs since the build path is known.
			uk.Analyser.InspectMappingList(uk.ElfFile, uk.ListObjs)
		} else {
			if err := uk.GetKernel(); err != nil {
				u.PrintErr(err)
			}
		}
		manager.ComputeAlignment(*uk)
	}

	manager.PerformAlignement()
}

View file

@ -0,0 +1,80 @@
package ukManager
import (
"fmt"
"strings"
"tools/srcs/binarytool/elf64analyser"
"tools/srcs/binarytool/elf64core"
)
// LinkerInfo holds the content of the generated linker script together
// with the computed start addresses of the data-related output sections.
type LinkerInfo struct {
	ldsString  string // full text of the generated linker script
	rodataAddr uint64 // location counter at the start of .rodata
	dataAddr   uint64 // location counter at the start of .data
	bssAddr    uint64 // location counter at the start of .bss
}

// Placeholder tokens embedded in the linker-script template returned by
// getLdsContent. Most are substituted with concrete ". = 0x...;"
// assignments in processLdsFile; the <INNER_*> tokens are not replaced
// there — presumably handled when writing the per-unikernel script
// (see writeLdsToFile) — TODO confirm.
const (
	endtext_location   = "<END_TEXT_REPLACE_LOCATION>"
	rodata_location    = "<RODATA_REPLACE_LOCATION>"
	data_location      = "<DATA_REPLACE_LOCATION>"
	erodata_location   = "<ERODATA_REPLACE_LOCATION>"
	edata_location     = "<EDATA_REPLACE_LOCATION>"
	bss_location       = "<BSS_REPLACE_LOCATION>"
	tbss_location      = "<TBSS_REPLACE_LOCATION>"
	intrstack_location = "<INTRSTACK_REPLACE_LOCATION>"
	ukinit_location    = "<UK_INIT_REPLACE_LOCATION>"
	inner_rodata       = "<INNER_RODATA>"
	inner_data         = "<INNER_DATA>"
	inner_bss          = "<INNER_BSS>"
)
// getLdsContent returns the template of the common linker script.
//
// The template fixes the overall layout (text, eh_frame, ctor/init
// tables, tls, bss, intrstack, debug sections) and embeds the
// <..._REPLACE_LOCATION> / <INNER_...> placeholder tokens declared
// above, which are substituted later with concrete location-counter
// assignments.
func getLdsContent() string {
	return "SECTIONS\n{\n . = 0x100000;\n _text = .;\n .text :\n {\n KEEP (*(.data.boot))\n *(.text.boot)\n *(.text)\n *(.text.*) /* uncomment it to dissagregate functions */\n }\n\n<END_TEXT_REPLACE_LOCATION>\n _etext = .;\n . = ALIGN((1 << 12)); __eh_frame_start = .; .eh_frame : { *(.eh_frame) *(.eh_frame.*) } __eh_frame_end = .; __eh_frame_hdr_start = .; .eh_frame_hdr : { *(.eh_frame_hdr) *(.eh_frame_hdr.*) } __eh_frame_hdr_end = .;\n . = ALIGN((1 << 12)); uk_ctortab_start = .; .uk_ctortab : { KEEP(*(SORT_BY_NAME(.uk_ctortab[0-9]))) } uk_ctortab_end = .;\n<UK_INIT_REPLACE_LOCATION> uk_inittab_start = .; .uk_inittab : { KEEP(*(SORT_BY_NAME(.uk_inittab[1-6][0-9]))) } uk_inittab_end = .;\n\n<RODATA_REPLACE_LOCATION>\n . = ALIGN((1 << 12));\n _rodata = .;\n .rodata :\n {\n<INNER_RODATA>\n *(.rodata)\n *(.rodata.*)\n }\n\n<ERODATA_REPLACE_LOCATION>\n _erodata = .;\n . = ALIGN(0x8);\n _ctors = .;\n .preinit_array : {\n PROVIDE_HIDDEN (__preinit_array_start = .);\n KEEP (*(.preinit_array))\n PROVIDE_HIDDEN (__preinit_array_end = .);\n }\n . = ALIGN(0x8);\n .init_array : {\n PROVIDE_HIDDEN (__init_array_start = .);\n KEEP (*(SORT_BY_INIT_PRIORITY(.init_array.*) SORT_BY_INIT_PRIORITY(.ctors.*)))\n KEEP (*(.init_array .ctors))\n PROVIDE_HIDDEN (__init_array_end = .);\n }\n _ectors = .;\n . = ALIGN(0x8); _tls_start = .; .tdata : { *(.tdata) *(.tdata.*) *(.gnu.linkonce.td.*) } _etdata = .;\n\n<DATA_REPLACE_LOCATION>\n . = ALIGN((1 << 12));\n _data = .;\n .data :\n {\n<INNER_DATA>\n *(.data)\n *(.data.*)\n }\n\n<EDATA_REPLACE_LOCATION>\n _edata = .;\n . = ALIGN((1 << 12));\n\n<BSS_REPLACE_LOCATION>\n __bss_start = .;\n .bss :\n {\n<INNER_BSS>\n *(.bss)\n *(.bss.*)\n *(COMMON)\n . = ALIGN((1 << 12));\n }\n\n<TBSS_REPLACE_LOCATION>\n .tbss : { *(.tbss) *(.tbss.*) *(.gnu.linkonce.tb.*) . = ALIGN(0x8); } _tls_end = . + SIZEOF(.tbss);\n\n<INTRSTACK_REPLACE_LOCATION>\n .intrstack :\n {\n *(.intrstack)\n . = ALIGN((1 << 12));\n }\n _end = .;\n .comment 0 : { *(.comment) }\n .debug 0 : { *(.debug) } .line 0 : { *(.line) } .debug_srcinfo 0 : { *(.debug_srcinfo) } .debug_sfnames 0 : { *(.debug_sfnames) } .debug_aranges 0 : { *(.debug_aranges) } .debug_pubnames 0 : { *(.debug_pubnames) } .debug_info 0 : { *(.debug_info .gnu.linkonce.wi.*) } .debug_abbrev 0 : { *(.debug_abbrev) } .debug_line 0 : { *(.debug_line .debug_line.* .debug_line_end ) } .debug_frame 0 : { *(.debug_frame) } .debug_str 0 : { *(.debug_str) } .debug_loc 0 : { *(.debug_loc) } .debug_macinfo 0 : { *(.debug_macinfo) } .debug_weaknames 0 : { *(.debug_weaknames) } .debug_funcnames 0 : { *(.debug_funcnames) } .debug_typenames 0 : { *(.debug_typenames) } .debug_varnames 0 : { *(.debug_varnames) } .debug_pubtypes 0 : { *(.debug_pubtypes) } .debug_ranges 0 : { *(.debug_ranges) } .debug_macro 0 : { *(.debug_macro) } .gnu.attributes 0 : { KEEP (*(.gnu.attributes)) }\n /DISCARD/ : { *(.note.gnu.build-id) }\n}"
}
// processLdsFile builds a LinkerInfo from the linker-script template.
//
// locationCnt is the location counter right after the text section;
// maxValSection gives, per section name, the maximum size observed
// among the unikernels. The placeholder tokens of the template are
// replaced with absolute ". = 0x...;" assignments and the computed
// start addresses of .rodata, .data and .bss are recorded in the
// returned LinkerInfo.
func processLdsFile(locationCnt uint64, maxValSection map[string]uint64) LinkerInfo {
	// Pairs a section name with the placeholder token that marks its
	// position in the template.
	type sectionLoc struct {
		sec string
		loc string
	}
	linkerInfo := LinkerInfo{}

	// Use an array to preserve order
	arrSection := []sectionLoc{
		{sec: elf64core.RodataSection, loc: rodata_location},
		{sec: elf64core.DataSection, loc: data_location},
		{sec: elf64core.BssSection, loc: bss_location},
		{sec: elf64core.TbssSection, loc: tbss_location}}

	ldsString := getLdsContent()

	// Update end of text
	ldsString = strings.Replace(ldsString, endtext_location, fmt.Sprintf(". = 0x%x;", locationCnt), -1)

	// Update ukinit (placed 0x60 bytes after the end of text —
	// NOTE(review): magic offset, confirm where 0x60 comes from)
	ldsString = strings.Replace(ldsString, ukinit_location, fmt.Sprintf(". = 0x%x;", locationCnt+0x60), -1)

	// Skip one page before the first data-related section.
	locationCnt += elf64analyser.PageSize
	for _, sect := range arrSection {
		if sect.sec == elf64core.RodataSection {
			linkerInfo.rodataAddr = locationCnt
		} else if sect.sec == elf64core.DataSection {
			// Update erodata just before data
			ldsString = strings.Replace(ldsString, erodata_location, fmt.Sprintf(". = 0x%x;", locationCnt-elf64analyser.PageSize), -1)
			linkerInfo.dataAddr = locationCnt
		} else if sect.sec == elf64core.BssSection {
			// Update edata just before bss
			ldsString = strings.Replace(ldsString, edata_location, fmt.Sprintf(". = 0x%x;", locationCnt-elf64analyser.PageSize), -1)
			linkerInfo.bssAddr = locationCnt
		}
		// Update rodata, data, bss, tbss
		ldsString = strings.Replace(ldsString, sect.loc, fmt.Sprintf(". = 0x%x;", locationCnt), -1)
		// Advance by the largest size seen for this section.
		locationCnt += maxValSection[sect.sec]
	}

	// Update intrstack
	linkerInfo.ldsString = strings.Replace(ldsString, intrstack_location, fmt.Sprintf(". = 0x%x;", locationCnt), -1)
	return linkerInfo
}

View file

@ -3,6 +3,7 @@ package ukManager
import (
"fmt"
"math"
"path/filepath"
"sort"
"strings"
"tools/srcs/binarytool/elf64analyser"
@ -10,7 +11,11 @@ import (
u "tools/srcs/common"
)
const ukbootMain = "libukboot_main.ld.o"
const (
ukbootMain = "libukboot_main.o"
ukkvmPlat = "libkvmplat.o"
libnewlibc = "libnewlibc.o"
)
type Manager struct {
Unikernels []*Unikernel
@ -24,6 +29,7 @@ type MicroLib struct {
size uint64
instance int
sectionSize *SectionMicroLibs
usedBy []string
}
type SectionMicroLibs struct {
@ -56,9 +62,18 @@ func (manager *Manager) ComputeAlignment(unikernel Unikernel) {
manager.MicroLibs[libs.Name].sectionSize.dataSize = libs.DataSize
manager.MicroLibs[libs.Name].sectionSize.bssSize = libs.BssSize
}
//todo handle case where microlibs do not have same content -> use hash instead of name
}
manager.MicroLibs[libs.Name].instance += 1
} else {
// Add offset
if libs.Name == ukkvmPlat {
libs.RodataSize += 0x10
} else if libs.Name == libnewlibc {
libs.Size += 0x22
//libs.Size += 0x12 // for 9 apps
}
mlib := &MicroLib{
name: libs.Name,
startAddr: libs.StartAddr,
@ -111,9 +126,60 @@ func (manager *Manager) DisplayMicroLibs() {
}
}
// updateRodataInner assigns an inner .rodata address to every micro-lib
// shared by several unikernels (instance > 1), advancing the global
// rodata location counter in 32-byte-rounded steps.
//
// Side effects: linkerInfoGlobal.dataAddr is set to the page-aligned
// end of rodata plus one page, and maxValSection[.rodata] is updated to
// cover the whole rodata span.
func (manager *Manager) updateRodataInner(maxValSection map[string]uint64, linkerInfoGlobal *LinkerInfo) {
	var totalSize uint64 = 0
	var initRodata = linkerInfoGlobal.rodataAddr
	for _, lib := range manager.SortedMicroLibs {
		// NOTE(review): debug output — consider removing for release.
		fmt.Printf("%s 0x%x \n", lib.name, lib.sectionSize.rodataSize)
		if lib.instance > 1 {
			// Update inner rodata location counter
			lib.sectionSize.rodataAddr = linkerInfoGlobal.rodataAddr
			linkerInfoGlobal.rodataAddr += roundAddr(lib.sectionSize.rodataSize, 32)
		}
		totalSize += lib.sectionSize.rodataSize
	}
	linkerInfoGlobal.dataAddr = roundAddr(initRodata+totalSize, elf64analyser.PageSize) + elf64analyser.PageSize
	maxValSection[elf64core.RodataSection] = linkerInfoGlobal.dataAddr - initRodata
}
// updateDataInner assigns an inner .data address to every micro-lib
// shared by several unikernels (instance > 1), advancing the global
// data location counter in 32-byte-rounded steps.
//
// Side effects: linkerInfoGlobal.bssAddr is set to the page-aligned end
// of data plus one page, and maxValSection[.data] is updated to cover
// the whole data span.
func (manager *Manager) updateDataInner(maxValSection map[string]uint64, linkerInfoGlobal *LinkerInfo) {
	var totalSize uint64 = 0
	var initData = linkerInfoGlobal.dataAddr
	for _, lib := range manager.SortedMicroLibs {
		if lib.instance > 1 {
			// Update inner dataAddr location counter
			lib.sectionSize.dataAddr = linkerInfoGlobal.dataAddr
			linkerInfoGlobal.dataAddr += roundAddr(lib.sectionSize.dataSize, 32)
		}
		totalSize += lib.sectionSize.dataSize
	}
	linkerInfoGlobal.bssAddr = roundAddr(initData+totalSize, elf64analyser.PageSize) + elf64analyser.PageSize
	maxValSection[elf64core.DataSection] = linkerInfoGlobal.bssAddr - initData
}
// updateBssInner assigns an inner .bss address to every micro-lib
// shared by several unikernels (instance > 1), advancing the global
// bss location counter in 32-byte-rounded steps.
//
// Side effect: maxValSection[.bss] is updated to the page-aligned size
// of the whole bss span. Unlike the rodata/data passes, no follow-up
// address is recorded in linkerInfoGlobal.
func (manager *Manager) updateBssInner(maxValSection map[string]uint64, linkerInfoGlobal *LinkerInfo) {
	var totalSize uint64 = 0
	var initBss = linkerInfoGlobal.bssAddr
	for _, lib := range manager.SortedMicroLibs {
		if lib.instance > 1 {
			// Update inner bssAddr location counter
			lib.sectionSize.bssAddr = linkerInfoGlobal.bssAddr
			linkerInfoGlobal.bssAddr += roundAddr(lib.sectionSize.bssSize, 32)
		}
		totalSize += lib.sectionSize.bssSize
	}
	maxValSection[elf64core.BssSection] = roundAddr(initBss+totalSize, elf64analyser.PageSize) - initBss
}
func (manager *Manager) PerformAlignement() {
var startValue uint64 = 0x106000
var startValue uint64 = 0x107000
var locationCnt = startValue
commonMicroLibs := make([]*MicroLib, 0)
@ -122,13 +188,16 @@ func (manager *Manager) PerformAlignement() {
manager.sortMicroLibs()
}
manager.DisplayMicroLibs()
// Update micro-libs mapping globally and per unikernels
for i, lib := range manager.SortedMicroLibs {
if lib.instance == len(manager.Unikernels) {
// micro-libs common to all instances
lib.startAddr = locationCnt
commonMicroLibs = append(commonMicroLibs, lib)
locationCnt += lib.size
//locationCnt += lib.size
locationCnt += roundAddr(locationCnt, elf64analyser.PageSize)
} else if lib.instance > 1 {
// micro-libs common to particular instances
if manager.SortedMicroLibs[i-1].instance == len(manager.Unikernels) {
@ -155,6 +224,9 @@ func (manager *Manager) PerformAlignement() {
} else if lib.instance == 1 {
// micro-libs to only single instance
for _, uk := range manager.Unikernels {
if uk.alignedLibs == nil {
uk.InitAlignment()
}
uk.AddSingleMicroLibs(roundAddr(locationCnt, elf64analyser.PageSize), lib)
}
}
@ -164,6 +236,7 @@ func (manager *Manager) PerformAlignement() {
sections := []string{elf64core.RodataSection, elf64core.DataSection, elf64core.TbssSection, elf64core.BssSection}
// Find max location counter value through unikernels
fmt.Printf("0x%x\n", locationCnt)
for _, uk := range manager.Unikernels {
// Update the locationCnt by finding the maximum one from unikernel (the biggest size)
@ -178,44 +251,45 @@ func (manager *Manager) PerformAlignement() {
}
}
// Update the common lds file with new location counter
locationCnt = roundAddr(locationCnt, elf64analyser.PageSize)
linkerInfo := processLdsFile(locationCnt, maxValSection)
// Use temporary variable to keep linkerInfo unchanged
// Use temporary variable to keep locationCnt unchanged
linkerInfoGlobal := &LinkerInfo{
ldsString: "",
rodataAddr: linkerInfo.rodataAddr,
dataAddr: linkerInfo.dataAddr,
bssAddr: linkerInfo.bssAddr,
}
// Redo a pass on micro-libs to align inner rodata, data and bss
for _, lib := range manager.SortedMicroLibs {
if lib.instance > 1 {
// Update inner rodata location counter
lib.sectionSize.rodataAddr = linkerInfoGlobal.rodataAddr
linkerInfoGlobal.rodataAddr += roundAddr(lib.sectionSize.rodataSize, 32)
// Update inner dataAddr location counter
lib.sectionSize.dataAddr = linkerInfoGlobal.dataAddr
linkerInfoGlobal.dataAddr += roundAddr(lib.sectionSize.dataSize, 32)
// Update inner bssAddr location counter
lib.sectionSize.bssAddr = linkerInfoGlobal.bssAddr
linkerInfoGlobal.bssAddr += roundAddr(lib.sectionSize.bssSize, 32)
}
rodataAddr: locationCnt + elf64analyser.PageSize, // We know the address of rodata
dataAddr: 0,
bssAddr: 0,
}
// Update inner section
manager.updateRodataInner(maxValSection, linkerInfoGlobal)
manager.updateDataInner(maxValSection, linkerInfoGlobal)
manager.updateBssInner(maxValSection, linkerInfoGlobal)
// Update the common lds file with new location counter
linkerInfo := processLdsFile(locationCnt, maxValSection)
fmt.Printf("rodata 0x%x\n", linkerInfoGlobal.rodataAddr)
fmt.Printf("data 0x%x\n", linkerInfoGlobal.dataAddr)
fmt.Printf("bss 0x%x\n", linkerInfoGlobal.bssAddr)
// Update per unikernel
for _, uk := range manager.Unikernels {
var filename string
if !strings.Contains("libkvmplat", uk.BuildPath) {
filename = filepath.Join(uk.BuildPath, "libkvmplat", "link64_out.lds")
} else {
filename = filepath.Join(uk.BuildPath, "link64_out.lds")
}
u.PrintInfo("Writing aligned linker script into: " + filename)
uk.writeTextAlignment(startValue)
// todo remove and replace per uk.buildpath
lib := strings.Replace(strings.Split(uk.BuildPath, "/")[5], "lib-", "", -1)
/*lib := strings.Replace(strings.Split(uk.BuildPath, "/")[5], "lib-", "", -1)
dst := "/Users/gaulthiergain/Desktop/memory_dedup/gcc/lds/common_optimized_app_dce_size/link64_" + lib + ".lds"
uk.writeLdsToFile(dst, linkerInfo)*/
uk.writeLdsToFile(dst, linkerInfo)
uk.writeLdsToFile(filename, linkerInfo)
}
}
@ -223,6 +297,7 @@ func (manager *Manager) PerformAlignement() {
func findMaxValue(section string, uk *Unikernel, maxValSection map[string]uint64) {
index := uk.ElfFile.IndexSections[section]
size := uk.ElfFile.SectionsTable.DataSect[index].Elf64section.Size
if val, ok := maxValSection[section]; ok {
if val < size {
maxValSection[section] = size

View file

@ -8,6 +8,7 @@ package ukManager
import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"os"
@ -19,10 +20,11 @@ import (
)
const (
makefile = "Makefile"
config = "config"
ldExt = ".ld.o"
dbgExt = ".dbg"
makefile = "Makefile"
config = "config"
ldExt = ".ld.o"
objectExt = ".o"
dbgExt = ".dbg"
)
type Unikernels struct {
@ -30,19 +32,8 @@ type Unikernels struct {
}
type Unikernel struct {
BuildPath string `json:"buildPath"`
Kernel string `json:"kernel"`
SectionSplit string `json:"splitSection"`
DisplayMapping bool `json:"displayMapping"`
DisplayStatSize bool `json:"displayStatSize"`
ComputeLibsMapping bool `json:"computeLibsMapping"`
IgnoredPlats []string `json:"ignoredPlats"`
DisplayElfFile []string `json:"displayElfFile"`
DisplaySectionInfo []string `json:"displaySectionInfo"`
FindSectionByAddress []string `json:"findSectionByAddress"`
CompareGroup int `json:"compareGroup"`
BuildPath string `json:"buildPath"`
Kernel string `json:"kernel"`
// Used to generate new link.lds file
ComputeTextAddr string `json:"computeTextAddr"`
@ -117,16 +108,15 @@ func (uk *Unikernel) GetFiles() error {
continue
}
if strings.Contains(f.Name(), ldExt) &&
!stringInSlice(f.Name(), uk.IgnoredPlats) {
if filepath.Ext(f.Name()) == objectExt && !strings.Contains(f.Name(), ldExt) {
objFile, err := parseFile(uk.BuildPath, f.Name())
if err != nil {
return err
}
uk.ListObjs = append(uk.ListObjs, objFile)
} else if filepath.Ext(strings.TrimSpace(f.Name())) == dbgExt &&
!stringInSlice(f.Name(), uk.IgnoredPlats) && !foundExec {
} else if filepath.Ext(strings.TrimSpace(f.Name())) == dbgExt && !foundExec {
execName := f.Name()
if len(uk.Kernel) > 0 {
@ -142,55 +132,17 @@ func (uk *Unikernel) GetFiles() error {
if len(uk.Kernel) > 0 {
u.PrintInfo("Use specified ELF file: " + uk.ElfFile.Name + "(" + uk.BuildPath + ")")
} else {
u.PrintInfo("Use ELF file found in build folder: " + uk.ElfFile.Name)
} else if uk.ElfFile != nil {
u.PrintInfo("Use ELF file found in " + uk.BuildPath)
}
if uk.ElfFile == nil {
return errors.New("impossible to find executable in the given folder: " + uk.BuildPath)
}
return nil
}
func (uk *Unikernel) displayAllElfInfo() {
uk.ElfFile.Header.DisplayHeader()
uk.ElfFile.SectionsTable.DisplaySections()
uk.ElfFile.DisplayRelocationTables()
uk.ElfFile.DisplaySymbolsTables()
uk.ElfFile.DynamicTable.DisplayDynamicEntries()
uk.ElfFile.SegmentsTable.DisplayProgramHeader()
uk.ElfFile.SegmentsTable.DisplaySegmentSectionMapping()
uk.ElfFile.DisplayNotes()
uk.ElfFile.DisplayFunctionsTables(false)
}
func (uk *Unikernel) DisplayElfInfo() {
if len(uk.DisplayElfFile) == 1 && uk.DisplayElfFile[0] == "all" {
uk.displayAllElfInfo()
} else {
for _, d := range uk.DisplayElfFile {
if d == "header" {
uk.ElfFile.Header.DisplayHeader()
} else if d == "sections" {
uk.ElfFile.SectionsTable.DisplaySections()
} else if d == "relocations" {
uk.ElfFile.DisplayRelocationTables()
} else if d == "symbols" {
uk.ElfFile.DisplaySymbolsTables()
} else if d == "dynamics" {
uk.ElfFile.DynamicTable.DisplayDynamicEntries()
} else if d == "segments" {
uk.ElfFile.SegmentsTable.DisplayProgramHeader()
} else if d == "mapping" {
uk.ElfFile.SegmentsTable.DisplaySegmentSectionMapping()
} else if d == "notes" {
uk.ElfFile.DisplayNotes()
} else if d == "functions" {
uk.ElfFile.DisplayFunctionsTables(false)
} else {
u.PrintWarning("No display configuration found for argument: " + d)
}
}
}
}
func (uk *Unikernel) InitAlignment() {
uk.alignedLibs = &AlignedLibs{
startValueUk: 0,
@ -225,8 +177,11 @@ func (uk *Unikernel) writeTextAlignment(startValue uint64) {
uk.strBuilder = strings.Builder{}
uk.strBuilder.WriteString("SECTIONS\n{\n")
uk.strBuilder.WriteString(fmt.Sprintf(" . = 0x%x;\n", startValue))
startValueInit := startValue
for _, lib := range uk.alignedLibs.AllCommonMicroLibs {
uk.strBuilder.WriteString(fmt.Sprintf(" .text.%s : {\n\t %s(.text);\n }\n", strings.Replace(lib.name, ldExt, "", -1), lib.name))
uk.strBuilder.WriteString(fmt.Sprintf(" .text.%s 0x%x: {\n\t %s(.text);\n }\n", strings.Replace(lib.name, ldExt, "", -1), startValueInit, lib.name))
startValueInit += lib.size
}
for _, lib := range uk.alignedLibs.OnlyFewMicroLibs {

39
srcs/all_libs.txt Normal file
View file

@ -0,0 +1,39 @@
/home/gain/workspace/unikraft/lib/devfs/
/home/gain/workspace/unikraft/lib/fdt/
/home/gain/workspace/unikraft/lib/isrlib/
/home/gain/workspace/unikraft/lib/nolibc/
/home/gain/workspace/unikraft/lib/posix-libdl/
/home/gain/workspace/unikraft/lib/posix-process/
/home/gain/workspace/unikraft/lib/posix-sysinfo/
/home/gain/workspace/unikraft/lib/posix-user/
/home/gain/workspace/unikraft/lib/ramfs/
/home/gain/workspace/unikraft/lib/syscall_shim/
/home/gain/workspace/unikraft/lib/ubsan/
/home/gain/workspace/unikraft/lib/uk9p/
/home/gain/workspace/unikraft/lib/ukalloc/
/home/gain/workspace/unikraft/lib/ukallocbbuddy/
/home/gain/workspace/unikraft/lib/ukallocpool/
/home/gain/workspace/unikraft/lib/ukallocregion/
/home/gain/workspace/unikraft/lib/ukargparse/
/home/gain/workspace/unikraft/lib/ukblkdev/
/home/gain/workspace/unikraft/lib/ukboot/
/home/gain/workspace/unikraft/lib/ukbus/
/home/gain/workspace/unikraft/lib/ukcpio/
/home/gain/workspace/unikraft/lib/ukdebug/
/home/gain/workspace/unikraft/lib/uklibparam/
/home/gain/workspace/unikraft/lib/uklock/
/home/gain/workspace/unikraft/lib/ukmmap/
/home/gain/workspace/unikraft/lib/ukmpi/
/home/gain/workspace/unikraft/lib/uknetdev/
/home/gain/workspace/unikraft/lib/ukring/
/home/gain/workspace/unikraft/lib/ukrust/
/home/gain/workspace/unikraft/lib/uksched/
/home/gain/workspace/unikraft/lib/ukschedcoop/
/home/gain/workspace/unikraft/lib/uksglist/
/home/gain/workspace/unikraft/lib/uksignal/
/home/gain/workspace/unikraft/lib/uksp/
/home/gain/workspace/unikraft/lib/ukswrand/
/home/gain/workspace/unikraft/lib/uktest/
/home/gain/workspace/unikraft/lib/uktime/
/home/gain/workspace/unikraft/lib/uktimeconv/
/home/gain/workspace/unikraft/lib/vfscore/

View file

@ -7,6 +7,8 @@
package elf64analyser
import (
"crypto/sha256"
"encoding/hex"
"fmt"
"os"
"sort"
@ -136,7 +138,8 @@ func compareFunctions(elf *elf64core.ELF64File, obj *elf64core.ELF64File) (uint6
}
if len(elfFuncsAll) == 0 {
u.PrintWarning(fmt.Sprintf("Cannot extract mapping of lib %s: No function", obj.Name))
//todo
//u.PrintWarning(fmt.Sprintf("Cannot extract mapping of lib %s: No function", obj.Name))
return 0, 0, 0
}
@ -144,7 +147,8 @@ func compareFunctions(elf *elf64core.ELF64File, obj *elf64core.ELF64File) (uint6
// We do not have the same set of functions, need to filter it.
filteredFuncs := filterFunctions(objFuncs, elfFuncsAll)
if filteredFuncs == nil {
u.PrintWarning(fmt.Sprintf("Cannot extract mapping of lib %s: Different size", obj.Name))
//todo
//u.PrintWarning(fmt.Sprintf("Cannot extract mapping of lib %s: Different size", obj.Name))
return 0, 0, 0
}
return filteredFuncs[0].Addr, filteredFuncs[len(filteredFuncs)-1].Size +
@ -196,7 +200,6 @@ func (analyser *ElfAnalyser) InspectMappingList(elf *elf64core.ELF64File,
})
}
/*
func (analyser *ElfAnalyser) SplitIntoPagesBySection(elfFile *elf64core.ELF64File, sectionName string) {
if len(analyser.ElfPage) == 0 {
@ -215,7 +218,6 @@ func (analyser *ElfAnalyser) SplitIntoPagesBySection(elfFile *elf64core.ELF64Fil
}
}
func CreateNewPage(startAddress uint64, k int, raw []byte) *ElfPage {
byteArray := make([]byte, PageSize)
b := raw
@ -248,4 +250,3 @@ func (analyser *ElfAnalyser) computePage(elfFile *elf64core.ELF64File, section s
k++
}
}
*/

View file

@ -246,12 +246,14 @@ func (analyser *ElfAnalyser) DisplayStatSize(elfFile *elf64core.ELF64File) {
}
}
_, _ = fmt.Fprintf(w, "----------------------\t----------------------\t------\t----------------------------\n")
_, _ = fmt.Fprintf(w, "Total Size:\n")
_, _ = fmt.Fprintf(w, "Total Size of this unikernel:\n")
_, _ = fmt.Fprintf(w, "Section .text:\t%d (0x%x)\n", totalSizeText, totalSizeText)
_, _ = fmt.Fprintf(w, "All sections:\t%d (0x%x)\n", totalSizeElf, totalSizeElf)
_, _ = fmt.Fprintf(w, "#Pages (.text):\t%d\n", roundPage(float64(totalSizeText)/float64(PageSize)))
_, _ = fmt.Fprintf(w, "#Pages (all sections):\t%d\n", roundPage(float64(totalSizeElf)/float64(PageSize)))
_, _ = fmt.Fprintf(w, "All sections of this unikernel:\t%d (0x%x)\n", totalSizeElf, totalSizeElf)
/*_, _ = fmt.Fprintf(w, "#Pages (.text) of this unikernel:\t%d\n", roundPage(float64(totalSizeText)/float64(PageSize)))
_, _ = fmt.Fprintf(w, "#Pages (all sections) of this unikernel:\t%d\n", roundPage(float64(totalSizeElf)/float64(PageSize)))*/
_ = w.Flush()
}

View file

@ -0,0 +1,62 @@
package elf64disassembler
import (
"fmt"
"github.com/knightsc/gapstone"
"log"
"strconv"
"strings"
"tools/srcs/binarytool/elf64core"
)
//disassembler.Disass_section(elfFile, s)
// hex2int converts a hexadecimal string, with or without a leading
// "0x" prefix, to an unsigned 64-bit integer. Invalid input yields 0.
func hex2int(hexStr string) uint64 {
	// remove the 0x prefix if found in the input string
	// (TrimPrefix only strips a leading "0x"; the previous Replace
	// removed the substring anywhere, silently accepting bad input)
	cleaned := strings.TrimPrefix(hexStr, "0x")

	// base 16 for hexadecimal; the error is deliberately ignored —
	// ParseUint returns 0 on failure, which callers treat as "unknown".
	result, _ := strconv.ParseUint(cleaned, 16, 64)
	return result
}
// Disass_section disassembles the given ELF section with Capstone
// (x86-64 mode) and prints every "call" instruction, resolving the
// operand address to a function name when it appears in
// elfFile.MapFctAddrName.
//
// NOTE(review): any Capstone error terminates the process via
// log.Fatalf; callStr is always empty — presumably a leftover
// placeholder, confirm before removing.
func Disass_section(elfFile *elf64core.ELF64File, sections *elf64core.DataSections) {
	// Raw bytes of the section, sliced straight out of the ELF image.
	sect_data := elfFile.Raw[sections.Elf64section.FileOffset : sections.Elf64section.FileOffset+sections.Elf64section.Size]
	engine, err := gapstone.New(
		gapstone.CS_ARCH_X86,
		gapstone.CS_MODE_64,
	)
	if err != nil {
		log.Fatalf("Disassembly error: %v", err)
	}
	maj, min := engine.Version()
	log.Printf("Hello Capstone! Version: %v.%v\n", maj, min)

	insns, err := engine.Disasm(
		sect_data, // code buffer
		sections.Elf64section.VirtualAddress, // starting address
		0, // 0 presumably disassembles everything — confirm with gapstone docs
	)
	if err != nil {
		log.Fatalf("Disassembly error: %v", err)
	}
	log.Printf("Disasm: %d %d\n", len(sect_data), len(insns))
	for _, insn := range insns {
		callStr := ""
		if insn.Mnemonic == "call" {
			fmt.Printf("0x%x -> ", insn.Bytes)
			fmt.Printf("0x%x:\t[%s]\t\t{%s}%s\n", insn.Address, insn.Mnemonic, insn.OpStr, callStr)
			// Resolve the call target to a known function name, if any.
			if val, ok := elfFile.MapFctAddrName[hex2int(insn.OpStr)]; ok {
				println("\t" + val)
			}
		}
	}
}

View file

@ -11,21 +11,26 @@ import (
"fmt"
"os"
"path/filepath"
"strconv"
"tools/srcs/binarytool/elf64analyser"
"tools/srcs/binarytool/ukManager"
u "tools/srcs/common"
)
const diffPath = "diff" + u.SEP
const pagesPath = "pages" + u.SEP
type BinaryManager struct {
Unikernels []*Unikernel
}
// RunBinaryAnalyser allows to run the binary analyser tool (which is out of the
// UNICORE toolchain).
func RunBinaryAnalyser(homeDir string) {
// Init and parse local arguments
args := new(u.Arguments)
p, err := args.InitArguments()
p, err := args.InitArguments("--binary",
"The Binary tool allows to help developers to gather stats on unikernels binaries")
if err != nil {
u.PrintErr(err)
}
@ -34,10 +39,10 @@ func RunBinaryAnalyser(homeDir string) {
}
// Check if a json file is used or if it is via command line
manager := new(ukManager.Manager)
manager.MicroLibs = make(map[string]*ukManager.MicroLib)
manager := new(BinaryManager)
if len(*args.StringArg[rootArg]) > 0 {
manager.Unikernels = make([]*ukManager.Unikernel, 0)
manager.Unikernels = make([]*Unikernel, 0)
mapping := false
if *args.BoolArg[mappingArg] {
mapping = true
@ -48,19 +53,26 @@ func RunBinaryAnalyser(homeDir string) {
}
for _, file := range files {
if file.IsDir() {
println(file.Name())
manager.Unikernels = append(manager.Unikernels, &ukManager.Unikernel{
BuildPath: filepath.Join(*args.StringArg[rootArg], file.Name()),
basename := filepath.Join(*args.StringArg[rootArg], file.Name())
if _, err := os.Stat(filepath.Join(basename, "build")); !os.IsNotExist(err) {
// A build folder exist
basename = filepath.Join(basename, "build")
}
manager.Unikernels = append(manager.Unikernels, &Unikernel{
BuildPath: basename,
DisplayMapping: mapping,
})
}
}
} else if len(*args.StringArg[filesArg]) > 0 {
var err error
manager.Unikernels, err = ukManager.ReadJsonFile(*args.StringArg[filesArg])
manager.Unikernels, err = ReadJsonFile(*args.StringArg[filesArg])
if err != nil {
u.PrintErr(err)
}
} else {
u.PrintErr(errors.New("argument(s) must be provided"))
}
@ -68,13 +80,28 @@ func RunBinaryAnalyser(homeDir string) {
var comparison elf64analyser.ComparisonElf
comparison.GroupFileSegment = make([]*elf64analyser.ElfFileSegment, 0)
var UnikernelsPath []string
UnikernelsPath = append(UnikernelsPath, "-u")
var SplitSects []string
SplitSects = append(SplitSects, "-l")
for i, uk := range manager.Unikernels {
uk.Analyser = new(elf64analyser.ElfAnalyser)
if len(uk.BuildPath) > 0 {
if _, err := os.Stat(filepath.Join(uk.BuildPath, "build")); !os.IsNotExist(err) {
// A build folder exist
uk.BuildPath = filepath.Join(uk.BuildPath, "build")
} else {
u.PrintWarning("Cannot find 'build/' folder, skip this configuration...")
}
if uk.BuildPath[len(uk.BuildPath)-1] != os.PathSeparator {
uk.BuildPath += u.SEP
}
if err := uk.GetFiles(); err != nil {
u.PrintErr(err)
}
@ -109,83 +136,30 @@ func RunBinaryAnalyser(homeDir string) {
uk.Analyser.FindSectionByAddress(uk.ElfFile, uk.FindSectionByAddress)
}
manager.ComputeAlignment(*uk)
/*if uk.CompareGroup > 0 {
foundSection := false
section := uk.SectionSplit
for _, s := range uk.ElfFile.SectionsTable.DataSect {
if s.Name == section {
foundSection = true
break
}
}
if foundSection && len(uk.SectionSplit) > 0 {
path := homeDir + u.SEP + pagesPath
if _, err := os.Stat(path); os.IsNotExist(err) {
err := os.Mkdir(path, os.ModePerm)
if err != nil {
u.PrintErr(err)
}
}
u.PrintInfo(fmt.Sprintf("Splitting %s section of %s into pages...", section, uk.ElfFile.Name))
uk.Analyser.SplitIntoPagesBySection(uk.ElfFile, section)
out := path + section[1:] + u.SEP
if _, err := os.Stat(out); os.IsNotExist(err) {
err := os.Mkdir(out, os.ModePerm)
if err != nil {
u.PrintErr(err)
}
}
if err := elf64analyser.SavePagesToFile(uk.Analyser.ElfPage, out+uk.ElfFile.Name+".txt", false); err != nil {
u.PrintErr(err)
}
u.PrintOk(fmt.Sprintf("Pages of section %s (%s) are saved into %s", section, uk.ElfFile.Name, out))
comparison.GroupFileSegment = append(comparison.GroupFileSegment,
&elf64analyser.ElfFileSegment{Filename: uk.ElfFile.Name,
NbPages: len(uk.Analyser.ElfPage), Pages: uk.Analyser.ElfPage})
} else if len(uk.SectionSplit) > 0 {
u.PrintWarning("Section '" + section + "' is not found in the ELF file")
}
}*/
}
manager.PerformAlignement()
/*
if uk.ComputeLibsMapping && len(uk.LibsMapping) > 0 {
if err != nil {
u.PrintErr(err)
} else {
uk.Analyser.ComputeAlignedMapping(uk.ElfFile, uk.LibsMapping)
}
}
*/
if len(comparison.GroupFileSegment) > 1 {
// Perform the comparison
path := homeDir + u.SEP + diffPath
if _, err := os.Stat(path); os.IsNotExist(err) {
err := os.Mkdir(path, os.ModePerm)
if err != nil {
u.PrintErr(err)
if uk.CompareGroup > 0 {
UnikernelsPath = append(UnikernelsPath, uk.BuildPath+uk.Kernel)
for _, sp := range uk.SplitSections {
SplitSects = append(SplitSects, sp)
}
}
comparison.ComparePageTables()
if err := comparison.DiffComparison(path); err != nil {
u.PrintWarning(err)
}
comparison.DisplayComparison()
}
u.PrintInfo("Analysing the following sections of " + strconv.Itoa(len(UnikernelsPath)-1) + " unikernels. This may take some time...")
for i, sect := range SplitSects {
if i > 0 {
println("- " + sect)
}
}
script := filepath.Join(os.Getenv("GOPATH"), "src", "tools", "srcs", "binarytool", "scripts", "uk_elf_sharing.py")
out, err := u.ExecuteCommand(script, append(UnikernelsPath, SplitSects...))
if err != nil {
u.PrintErr(err)
}
u.PrintOk("Finish to analyse " + strconv.Itoa(len(UnikernelsPath)-1) + " unikernels")
u.PrintInfo("Displaying stats")
println(out)
u.PrintOk("Diff files have been saved to ./diff/")
u.PrintOk("Pages files have been saved to ./pages/")
}

View file

@ -0,0 +1,360 @@
#!/usr/bin/python3
import os
import math
import sys
import diff_match_patch as dmp_module
import argparse
from capstone import *
from subprocess import run, PIPE
from uk_sharing_class import *
from collections import defaultdict
from elftools.elf.elffile import ELFFile
# Global behaviour switches for the analysis; set here, no CLI override
# visible in this part of the script.
VERBOSE=True
DIFF=False
RENDER=False
SAVE_TO_PAGE=False

class bcolors:
    # ANSI escape codes used to colourise terminal output.
    HEADER = '\033[95m'
    INFO = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    ERROR = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'

# Default section patterns and unikernel binaries to analyse.
# NOTE(review): UNIKERNEL_NAMES hard-codes user-specific absolute paths —
# confirm these defaults are overridden by the argparse arguments.
SECTION_NAMES = ['.text.*']
UNIKERNEL_NAMES = ['/Users/gaulthiergain/Desktop/buildCompare/lib-nginx/build/unikernel_kvm-x86_64_local_align.dbg', '/Users/gaulthiergain/Desktop/buildCompare/lib-echoreply/build/unikernel_kvm-x86_64_local_align.dbg']
def print_error(msg):
print("[" + bcolors.ERROR + bcolors.BOLD + "WARNING" + bcolors.ENDC + "]" + msg)
def print_info(msg):
print("[" + bcolors.INFO + bcolors.BOLD + "INFO" + bcolors.ENDC + "]" + msg)
def print_warning(msg):
print("[" + bcolors.WARNING + bcolors.BOLD + "WARNING" + bcolors.ENDC + "]" + msg)
def get_unikernels(uks):
    """Build one Unikernel object per path (name = last path component)."""
    return [Unikernel(path.split("/")[-1], path) for path in uks]
def disassemble(uk, page):
    """Disassemble a page's raw bytes (x86-64) and record its instructions."""
    engine = Cs(CS_ARCH_X86, CS_MODE_64)
    for insn in engine.disasm(page.content, page.start):
        page.instructions.append(
            Instruction(insn.address, insn.mnemonic, insn.op_str, insn.bytes))
    # Render the textual listing, resolving addresses via the symbol map.
    page.instructions_to_string(uk.map_symbols)
def compare_pages(a, b, size):
    """Return True when the first `size` bytes of a and b are identical."""
    return all(a[idx] == b[idx] for idx in range(size))
def process_symbols(uk, lines):
    """Parse `nm` output lines into Symbol objects keyed by address."""
    for line in lines:
        fields = line.split()
        if len(fields) != 3:
            # Lines without address/type/name (e.g. undefined symbols) are skipped.
            print_warning("Ignoring symbol {}".format(line))
            continue
        sym = Symbol(int(fields[0], 16), fields[2], fields[1])
        uk.map_symbols[sym.address].append(sym)
def get_symbols(uk):
    # Run `nm --no-demangle` on the unikernel binary and load its symbol
    # table into uk.map_symbols. Exits the whole script if nm produced
    # no output at all.
    p = run( ['nm', '--no-demangle',uk.name], stdout=PIPE, stderr=PIPE, universal_newlines=True)
    if p.returncode == 0 and len(p.stdout) > 0:
        process_symbols(uk, p.stdout.splitlines())
    elif len(p.stderr) > 0:
        # nm ran but reported an error: warn and continue without symbols.
        print_warning("- stderr:" + str(p.stderr))
    else:
        # No stdout and no stderr: treat as a fatal environment problem.
        print_error("- [ERROR] Failure to run NM")
        sys.exit(1)
def process_file_aslr_dce(uk):
    # Parse the ELF image and populate uk.segments / uk.sections, merging
    # all ".text.<lib>" sections into one synthetic ".text.code" section
    # spanning from the first to the last of them (ASLR/DCE layout).
    with open(uk.name, 'rb') as f:
        elffile = ELFFile(f)
        for segment in elffile.iter_segments():
            uk.segments.append(Segment(segment['p_vaddr'], segment['p_offset'], segment['p_memsz']))
        first = None   # first ".text.*" section encountered
        last = None    # last ".text.*" section encountered
        for section in elffile.iter_sections():
            uk_sect = Section(section.name , section['sh_addr'], section['sh_offset'], section['sh_size'], section['sh_addralign'])
            if section.name == '.text':
                # Add it to the beginning before other ".text" sections
                uk.sections.insert(1, uk_sect)
            elif len(section.name) == 0:
                ## Empty section
                uk.sections.insert(0, uk_sect)
            elif uk_sect.start == 0:
                ## No loadable section (ignore)
                continue
            elif section.name.startswith(".text.") and first==None:
                first = uk_sect
                continue
            elif section.name.startswith(".text."):
                last = uk_sect
                continue
            else:
                uk.sections.append(uk_sect)
                uk.map_addr_section[section['sh_addr']] = uk_sect
        # Synthetic section covering [first, last] ".text.*" sections.
        # NOTE(review): raises AttributeError when the binary has fewer
        # than two ".text.*" sections (first/last stay None) — TODO guard.
        uk_sect = Section(".text.code" , first.start, first.offset, last.start-first.start+last.size, first.alignment)
        uk.sections.append(uk_sect)
        # NOTE(review): `section` is the loop variable from the last
        # iteration, so the synthetic section is indexed under the *last*
        # section's address — confirm this is intended.
        uk.map_addr_section[section['sh_addr']] = uk_sect
def process_file(uk):
    # Parse the ELF image and populate uk.segments / uk.sections, keeping
    # the unnamed section first and ".text" right after it.
    with open(uk.name, 'rb') as f:
        elffile = ELFFile(f)
        for segment in elffile.iter_segments():
            uk.segments.append(Segment(segment['p_vaddr'], segment['p_offset'], segment['p_memsz']))
        for section in elffile.iter_sections():
            uk_sect = Section(section.name , section['sh_addr'], section['sh_offset'], section['sh_size'], section['sh_addralign'])
            if section.name == '.text':
                # Add it to the beginning before other ".text" sections
                uk.sections.insert(1, uk_sect)
            elif len(section.name) == 0:
                ## Empty section
                uk.sections.insert(0, uk_sect)
            elif uk_sect.start == 0:
                ## No loadable section (ignore)
                continue
            else:
                uk.sections.append(uk_sect)
            # Index every kept section by its sh_addr for fast lookup.
            uk.map_addr_section[section['sh_addr']] = uk_sect
def myround(x, base=PAGE_SIZE):
    """Round x up to the next multiple of `base` (default: one page)."""
    return math.ceil(x / base) * base
def page_to_file(s, i, page, args, path):
    """Dump one page to <path><section>_page_<i>.bin (raw bytes) and .txt (listing)."""
    base = path + s.name.replace(".", "") + "_page_" + str(i)
    if args.verbose:
        print("- Save page {} into file {}.bin-txt".format(page.number, base))
    with open(base + ".bin", "wb") as binfile:
        binfile.write(page.content)
    with open(base + ".txt", "w") as txtfile:
        txtfile.write(page.instructions_string)
def process_pages(uk, args, path):
    # Split every selected section into PAGE_SIZE chunks, disassemble
    # each chunk and optionally dump it to disk (--pages).
    for s in uk.sections:
        if s.name in args.list:
            # p is the byte offset within the section, i the page index.
            for i, p in enumerate(range(0, len(s.data), PAGE_SIZE)):
                page = Page("", i, s.start+p, PAGE_SIZE, uk.shortname, s.name, s.data[p:p+PAGE_SIZE])
                disassemble(uk, page)
                s.pages.append(page)
                if args.pages:
                    page_to_file(s, i, page, args, path)
def process_data_sections(uk, all_text_section, args):
    # Load the raw bytes of every selected section and, with --outputVMA,
    # copy them into a flat VMA image (uk.byte_array). Returns the
    # (start, size) pairs of the processed sections.
    addresses = list()
    with open(uk.name, 'rb') as f:
        elffile = ELFFile(f)
        if args.outputVMA:
            # Image sized up to the end of the first segment.
            # NOTE(review): assumes uk.segments[0] covers the highest
            # address of interest — confirm for multi-segment images.
            uk.byte_array = bytearray([0] * (uk.segments[0].address+uk.segments[0].size))
        for s in uk.sections:
            if all_text_section and s.name.startswith(".text"):
                # With '.text.*', every .text-like section is selected.
                # NOTE(review): this mutates args.list, which is shared
                # across unikernels — intentional accumulation? Confirm.
                args.list.insert(0, s.name )
            if s.name in args.list:
                s.data = elffile.get_section_by_name(s.name).data()
                if args.outputVMA:
                    # Put the data to the binary representation
                    uk.byte_array[s.start:s.start+len(s.data)] = s.data
                # Add to list for minimize
                addresses.append((s.start, s.size))
                if args.verbose:
                    print("- [{}] Start: 0x{:02x} (size: 0x{:02x}/{}) End: 0x{:02x} (roundup: 0x{:02x})".format(s.name, s.start, len(s.data), s.size, s.start+s.size, myround(s.start+s.size)))
    return addresses
def process_stats(same_pages, unikernels, args):
    # Group identical pages by content hash across all unikernels.
    # `same_pages` maps sha256 -> [Page, ...]; lists longer than one are
    # shareable frames. Returns the total number of pages examined.
    total_pages = 0
    for uk in unikernels:
        for s in uk.sections:
            if s.name in args.list:
                for i, p in enumerate(s.pages):
                    if p.hash in same_pages:
                        m = same_pages[p.hash]
                        # Guard against hash collisions with a byte compare.
                        same = compare_pages(m[0].content, p.content, PAGE_SIZE)
                        if same:
                            same_pages[p.hash].append(p)
                        else:
                            # NOTE(review): a genuine collision is only
                            # reported; the page is not recorded anywhere.
                            print_warning("False positive " + str(i))
                    else:
                        same_pages[p.hash].append(p)
                    total_pages += 1
    return total_pages
def process_diff(workdir, map_same_pages, args):
    # For pages unique to one unikernel, diff the page against its
    # counterpart (same section name + page number) from another
    # unikernel and write the result as HTML under <workdir>/diff/.
    map_distinct_pages = defaultdict(list)  # "<section><page#>" -> pages
    for _, v in map_same_pages.items():
        if len(v) == 1:
            map_distinct_pages[v[0].sect_name + str(v[0].number)].append(v[0])

    path = os.path.join(workdir, DIFF_FOLDER)
    # Fix: removed a leftover debug `print(path)` statement.
    if not os.path.exists(path):
        os.makedirs(path)

    for k, v in map_distinct_pages.items():
        if len(v) > 1:
            if args.verbose:
                print("- Compare {} between {} instances".format(k, len(v)))
            # NOTE(review): only the first two instances are compared even
            # when more than two unikernels disagree on this page.
            dmp = dmp_module.diff_match_patch()
            diff = dmp.diff_main(v[0].instructions_string, v[1].instructions_string)
            html = dmp.diff_prettyHtml(diff)
            current_function = ""
            if args.render:
                # Keep only chunks containing actual insertions/deletions,
                # prefixed with the enclosing "[== function ==]" marker.
                body = ""
                for h in html.split("<br>"):
                    if "== " in h:
                        current_function = (h + "<br>").replace(";", "")
                    if "del" in h or "ins" in h:
                        body += current_function.replace("&para", "<br>")
                        body += h.replace("&para", "<br>").replace(";", "")
                        current_function = ""
                html = body
            else:
                html = html.replace("&para", "").replace(";", "")
            with open(("{}{}_page_{}_{}.html".format(path, k.replace(".", ""), v[0].number, v[1].number)), "w") as f:
                f.write(html)
def display_stats(map_same_pages, totalPages, args, totalZeroes):
    # Print page-sharing statistics and return, for each group of
    # identical pages, the number of pages collapsed into one frame.
    reduction = list()
    pages_sharing = 0  # pages belonging to a shared group
    pages_shared = 0   # distinct frames backing shared pages
    total_frames = 0   # frames needed overall (shared groups count once)
    for k, v in map_same_pages.items():
        if len(v) > 1:
            pages_shared += 1
            total_frames += 1
            pages_sharing += len(v)
            reduction.append(len(v))
        else:
            total_frames += 1
            p = v[0]
            if args.verbose:
                print(" {}: {} -> Page {}: 0x{:02x} - 0x{:02x} [{}] ({}:{})".format(k[0:10], len(v), p.number, p.start, p.end, p.zeroes, p.uk_name, p.sect_name))
    print("- TOTAL PAGES: %d" % totalPages)
    print("- TOTAL PAGES SHARED: %d" % pages_shared)
    print("- TOTAL PAGES SHARING: %d" % pages_sharing)
    print("- TOTAL ZEROES PAGES: {}".format(totalZeroes))
    print("- TOTAL NO-ZEROES PAGES: {}".format(totalPages-totalZeroes))
    # Fix: guard against ZeroDivisionError when no page was processed.
    if totalPages > 0:
        print("- SHARING: %.2f (%d/%d)" % ((pages_sharing/totalPages) * 100, pages_sharing, totalPages))
    else:
        print("- SHARING: n/a (no pages analysed)")
    print("- TOTAL FRAMES: {}".format(total_frames))
    print("- TOTAL MB: {}".format((total_frames * PAGE_SIZE)/(1024*1024)))
    return reduction
def main():
    # Command-line entry point: parse options, load and disassemble the
    # selected sections of every unikernel, then report shared pages.
    parser = argparse.ArgumentParser()
    parser.add_argument('-w', '--workdir', help='Path to json workdir to analyse', type=str, default=".")
    parser.add_argument('-l','--list', help='Sections names to consider as a list (-l sect1 sect2 ...)', nargs='+', default=SECTION_NAMES)
    # NOTE(review): the flags below take a value string, so any non-empty
    # value — even "False" — is truthy; action='store_true' would be the
    # conventional form. Confirm before changing the CLI.
    parser.add_argument('-o','--outputVMA', help='Save all vma to binary file', default=False)
    parser.add_argument('-p','--pages', help='Save pages to bin file', default=SAVE_TO_PAGE)
    parser.add_argument('-m','--minimize', help='Minimize the size (remove leading zero to binary file)', default=False)
    parser.add_argument('-u','--unikernels', help='Unikernels to compare as a list (-u uk1 uk2 ...)', nargs='+', default=UNIKERNEL_NAMES)
    parser.add_argument('-s','--stats', help='Stats on pages sharing', default=True)
    parser.add_argument('-v','--verbose', help='verbose mode', default=VERBOSE)
    parser.add_argument('-d','--diff', help='Perform diff between pages', default=DIFF)
    parser.add_argument('-r','--render', help='view diff only in html', default=RENDER)
    args = parser.parse_args()
    unikernels = get_unikernels(args.unikernels)
    # Create pages folder
    if args.pages and not os.path.exists(os.path.join(args.workdir, PAGE_FOLDER)):
        os.makedirs(os.path.join(args.workdir, PAGE_FOLDER))
    #filter '.text.*' in
    all_text_section = False
    if '.text.*' in args.list:
        all_text_section = True
    for uk in unikernels:
        get_symbols(uk)
        # Get the full folder path name for exporting pages
        path = os.path.join(args.workdir, PAGE_FOLDER, uk.shortname) + os.sep
        if args.pages and not os.path.exists(path):
            os.makedirs(path)
        # Process the elf file
        process_file(uk)
        addresses = process_data_sections(uk, all_text_section, args)
        process_pages(uk, args, path)
        if args.outputVMA:
            if args.minimize:
                # Trim everything before the first and after the last
                # selected section (rounded up to a full page).
                (addr_1, _) = addresses[0]
                (addr_2, size_2) = addresses[-1]
                uk.byte_array = uk.byte_array[addr_1:myround(addr_2 + size_2)]
            with open(path + uk.shortname+".bin", "wb") as f:
                f.write(uk.byte_array)
    map_same_pages = defaultdict(list)
    total_pages = process_stats(map_same_pages, unikernels, args)
    if args.diff:
        process_diff(args.workdir, map_same_pages, args)
    if args.stats:
        # Zeroed-page total is not computed here, hence the literal 0.
        display_stats(map_same_pages, total_pages, args, 0)

View file

@ -0,0 +1,186 @@
import re
import os
import hashlib
from collections import defaultdict
from binascii import hexlify
# Output folders (relative to the working directory).
DIFF_FOLDER = "diff" + os.sep
PAGE_FOLDER = "pages" + os.sep
PAGE_VMA_FOLDER = "pages_vma" + os.sep

DBG_EXTENSION = ".dbg"  # extension of debug-symbol unikernel images
PAGE_SIZE = 4096        # page granularity used for the sharing analysis

# Build-variant markers that may appear in a unikernel file name.
SIZE = "size"
ALIGN = "_local_align"
ASLR_PLT = "_aslr_plt"
ASLR_DCE = "_aslr_dce"
ASLR_DEFAULT = "_aslr_default"

class Unikernel:
    """One unikernel image under analysis (segments, sections, symbols)."""
    def __init__(self, shortname, name):
        self.shortname = self.filter_name(shortname)    # file name without ".dbg"
        self.name = name                                # full path to the image
        self.type_unikernel = self.process_name(shortname)
        self.segments = list()
        self.sections = list()
        self.byte_array = None                # flat VMA image, filled on demand
        self.map_addr_section = dict()        # sh_addr -> Section
        self.map_symbols = defaultdict(list)  # address -> [Symbol, ...]
        self.dump = None
    def filter_name(self, name):
        # Remove a trailing ".dbg" extension, if present.
        # Fix: the previous name.strip(DBG_EXTENSION) removed any leading
        # and trailing characters from the set {'.','d','b','g'} (e.g.
        # "debug.dbg" -> "ebu"), not the suffix itself.
        if name.endswith(DBG_EXTENSION):
            return name[:-len(DBG_EXTENSION)]
        return name
    def process_name(self, shortname):
        # Identify the build variant from markers in the file name.
        if ALIGN in shortname:
            return ALIGN
        elif SIZE in shortname:
            return SIZE
        elif ASLR_PLT in shortname:
            return ASLR_PLT
        elif ASLR_DEFAULT in shortname:
            return ASLR_DEFAULT
        elif ASLR_DCE in shortname:
            return ASLR_DCE
        else:
            return ""
class Segment:
    """A loadable ELF segment (program header): VMA, file offset, memory size."""
    def __init__(self, address, offset, size):
        # Keep the raw program-header values as-is.
        self.size = size
        self.offset = offset
        self.address = address
class Section:
    # One ELF section plus the per-page bookkeeping used by the sharing
    # analysis (pages list and lazily loaded raw data).
    def __init__(self, name, start, offset, size, alignment):
        self.name = name
        self.start = start
        # Start address rounded to the *nearest* page boundary.
        self.start_align = self.round_mult()
        self.offset = offset
        self.size = size
        self.alignment = alignment
        self.end = start+size
        self.pages = list()  # Page objects, filled by process_pages()
        self.data = None     # raw section bytes, filled lazily
    def round_mult(self, base=PAGE_SIZE):
        # Round self.start to the nearest multiple of base.
        # NOTE(review): round() uses half-to-even and can round *down*;
        # if a page-aligned ceiling is intended here, math.ceil should be
        # used instead — confirm.
        if self.start % PAGE_SIZE != 0:
            return base * round(self.start / base)
        return self.start
class Symbol:
    """A single nm symbol: address, name and nm type letter (`info`)."""
    def __init__(self, address, name, info):
        self.info = info
        self.name = name
        self.address = address
class Page:
    # One PAGE_SIZE-byte chunk of a section, with its disassembly listing
    # and a sha256 content hash used to detect identical pages.
    def __init__(self, name, number, start, size, uk_name, sect_name, content):
        self.name = name
        self.number = number    # page index within its section
        self.start = start      # virtual address of the first byte
        self.size = size
        self.end = self.start+self.size
        self.uk_name = uk_name      # short name of the owning unikernel
        self.sect_name = sect_name  # name of the owning section
        self.content = self.process_content(content)  # zero-padded to a full page
        self.instructions = list()
        self.instructions_string = ""
        self.others = defaultdict(list)  # addr -> extra symbol names at that addr
        self.zeroes = self.count_zeroes()
        self.hash = hashlib.sha256(self.content).hexdigest()
    def warning_symbol(self, map_symbols, addr):
        # When several symbols share `addr`, record the extras in
        # self.others and return a short " + N others" marker.
        if len(map_symbols[addr]) > 1:
            #print("[WARNING] several symbols for {:02x}".format(addr))
            for t in map_symbols[addr]:
                #print("\t-{} {} 0x{:02x}".format(t.name, t.info, t.address))
                if t.name not in self.others[t.address]:
                    self.others[t.address].append(t.name)
            return " + " + str(len(map_symbols[addr])) + "others "
        return ""
    def disassemble_bytes(self):
        # Render self.content as a hex dump: address prefix, byte pairs
        # grouped by 4, 16 bytes per line, followed by an ASCII column.
        str_ascii = ""
        for i, b in enumerate(self.content):
            if i == 0:
                self.instructions_string +="0x{:02x}".format(self.start) + ": "
            if i > 0 and i % 4 == 0:
                self.instructions_string += " "
            if i > 0 and i % 16 == 0:
                self.instructions_string += str_ascii
                self.instructions_string += "\n" + "0x{:02x}".format(self.start + i) + ": "
                str_ascii = ""
            fb = "{:02x}".format(b)
            self.instructions_string += fb
            # NOTE(review): printable ASCII starts at 32; the > 20 lower
            # bound lets a few control characters through — confirm.
            if int(fb, 16) > 20 and int(fb, 16) < 126:
                ascii_str = "%s" % bytearray.fromhex(fb).decode()
            else:
                ascii_str = "."
            str_ascii += ascii_str
    def instructions_to_string(self, map_symbols):
        # Build the textual listing of disassembled instructions,
        # annotating known symbol addresses (function starts and operand
        # addresses that resolve to a symbol).
        for ins in self.instructions:
            function_call = ""
            # FUNCTION NAME
            if ins.address in map_symbols:
                ret = self.warning_symbol(map_symbols, ins.address)
                self.instructions_string += "\n[== " + map_symbols[ins.address][0].name + ret + " ==]\n"
            #FUNCTION CALL
            regex = r"0x[a-f0-9]*"
            matches = re.finditer(regex, ins.op_str)
            for _, z in enumerate(matches, start=1):
                addr = int(z.group(),16)
                if addr in map_symbols:
                    ret = self.warning_symbol(map_symbols, addr)
                    function_call = "(" + map_symbols[addr][0].name + ret + ")"
            self.instructions_string += "{: <32} 0x{:02x} {: <10} {: <10}{}\n".format(ins.bytes, ins.address, ins.mnemonic, ins.op_str, function_call)
        # Append the extra symbols collected by warning_symbol().
        for k,values in self.others.items():
            self.instructions_string += "\n0x{:02x}: [".format(k)
            for v in values:
                self.instructions_string += "" + v + ","
            self.instructions_string +=("]")
    def process_content(self, content):
        # ALIGN if necessary: zero-pad a short final chunk to a full page
        # so hashing and byte comparison always see PAGE_SIZE bytes.
        if len(content) % PAGE_SIZE != 0:
            byte_array = bytearray([0] * PAGE_SIZE)
            byte_array[0:len(content)] = content
            return byte_array
        return content
    def count_zeroes(self):
        # Number of zero bytes in the page (used by the statistics output).
        zeroes = 0
        for c in self.content:
            if c == 0:
                zeroes += 1
        return zeroes
class Instruction:
    """A disassembled instruction with its bytes rendered as hex pairs."""
    def __init__(self, address, mnemonic, op_str, _bytes):
        self.address = address
        self.mnemonic = mnemonic
        self.op_str = op_str
        # e.g. b"\xb8\x01" -> "b8 01": space-separated hex byte pairs.
        self.bytes = self.cut(hexlify(_bytes).decode())
    def cut(self, line, n=2):
        """Split `line` into n-character chunks joined by single spaces."""
        chunks = (line[pos:pos + n] for pos in range(0, len(line), n))
        return ' '.join(chunks)
class Dump:
    """A raw memory dump of a unikernel image, later split into pages."""
    def __init__(self, shortname, name, content):
        self.pages = list()
        self.content = content
        self.name = name
        self.shortname = shortname

View file

@ -1,103 +0,0 @@
package ukManager
import (
"bufio"
"fmt"
"log"
"os"
"strings"
"tools/srcs/binarytool/elf64analyser"
"tools/srcs/binarytool/elf64core"
)
// LinkerInfo carries the generated linker-script text together with the
// addresses that were assigned to the main data sections.
type LinkerInfo struct {
	ldsString  string // content of the generated .lds file
	rodataAddr uint64 // address chosen for .rodata
	dataAddr   uint64 // address chosen for .data
	bssAddr    uint64 // address chosen for .bss
}

// Placeholder markers in lds/common.ld that processLdsFile substitutes
// with concrete location-counter assignments (". = 0x...;").
const (
	endtext_location   = "<END_TEXT_REPLACE_LOCATION>"
	rodata_location    = "<RODATA_REPLACE_LOCATION>"
	data_location      = "<DATA_REPLACE_LOCATION>"
	erodata_location   = "<ERODATA_REPLACE_LOCATION>"
	edata_location     = "<EDATA_REPLACE_LOCATION>"
	bss_location       = "<BSS_REPLACE_LOCATION>"
	tbss_location      = "<TBSS_REPLACE_LOCATION>"
	intrstack_location = "<INTRSTACK_REPLACE_LOCATION>"
	ukinit_location    = "<UK_INIT_REPLACE_LOCATION>"

	// NOTE(review): the three inner_* markers are not referenced in this
	// file — confirm they are used elsewhere before removing them.
	inner_rodata = "<INNER_RODATA>"
	inner_data   = "<INNER_DATA>"
	inner_bss    = "<INNER_BSS>"
)
// readLdsContent reads the linker-script template at filename and returns
// its content as a single string, one "\n"-terminated line per input line.
// The process exits (log.Fatal) if the file cannot be opened or read.
func readLdsContent(filename string) string {
	file, err := os.Open(filename)
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()

	var sb strings.Builder
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		sb.WriteString(scanner.Text())
		sb.WriteByte('\n')
	}
	if err := scanner.Err(); err != nil {
		log.Fatal(err)
	}
	return sb.String()
}
// processLdsFile generates the content of a linker script from the
// template lds/common.ld: each placeholder is replaced by a location-
// counter assignment starting at locationCnt, and each data section is
// sized with its per-section maximum from maxValSection, rounded up to
// the page size.
//
// It returns a LinkerInfo holding the final script text and the
// addresses assigned to .rodata, .data and .bss.
func processLdsFile(locationCnt uint64, maxValSection map[string]uint64) LinkerInfo {
	type sectionLoc struct {
		sec string // section name (elf64core constant)
		loc string // placeholder to replace in the template
	}
	linkerInfo := LinkerInfo{}
	// Use an array to preserve order (rodata, data, bss, tbss).
	arrSection := []sectionLoc{
		{sec: elf64core.RodataSection, loc: rodata_location},
		{sec: elf64core.DataSection, loc: data_location},
		{sec: elf64core.BssSection, loc: bss_location},
		{sec: elf64core.TbssSection, loc: tbss_location}}

	ldsString := readLdsContent("lds/common.ld")
	// Update end of text
	ldsString = strings.Replace(ldsString, endtext_location, fmt.Sprintf(". = 0x%x;", locationCnt), -1)
	// Update ukinit (placed 0x40 bytes after the end of text)
	ldsString = strings.Replace(ldsString, ukinit_location, fmt.Sprintf(". = 0x%x;", locationCnt+0x40), -1)
	locationCnt += elf64analyser.PageSize
	for _, sect := range arrSection {
		if sect.sec == elf64core.RodataSection {
			linkerInfo.rodataAddr = locationCnt
		} else if sect.sec == elf64core.DataSection {
			// Update erodata just before data
			ldsString = strings.Replace(ldsString, erodata_location, fmt.Sprintf(". = 0x%x;", locationCnt), -1)
			locationCnt += elf64analyser.PageSize
			linkerInfo.dataAddr = locationCnt
		} else if sect.sec == elf64core.BssSection {
			// Update edata just before bss
			ldsString = strings.Replace(ldsString, edata_location, fmt.Sprintf(". = 0x%x;", locationCnt), -1)
			locationCnt += elf64analyser.PageSize
			linkerInfo.bssAddr = locationCnt
		}
		// Update rodata, data, bss, tbss
		ldsString = strings.Replace(ldsString, sect.loc, fmt.Sprintf(". = 0x%x;", locationCnt), -1)
		locationCnt += maxValSection[sect.sec]
		locationCnt = roundAddr(locationCnt, elf64analyser.PageSize)
	}
	// Update intrstack (placed after the last data section)
	linkerInfo.ldsString = strings.Replace(ldsString, intrstack_location, fmt.Sprintf(". = 0x%x;", locationCnt), -1)
	return linkerInfo
}
}

View file

@ -0,0 +1,187 @@
// Copyright 2019 The UNICORE Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file
//
// Author: Gaulthier Gain <gaulthier.gain@uliege.be>
package binarytool
import (
"encoding/json"
"errors"
"io/ioutil"
"os"
"path/filepath"
"strings"
"tools/srcs/binarytool/elf64analyser"
"tools/srcs/binarytool/elf64core"
u "tools/srcs/common"
)
// File-name markers and extensions used when scanning a build folder.
const (
	makefile  = "Makefile" // skipped: Makefiles
	config    = "config"   // skipped: configuration files
	ldExt     = ".ld.o"    // linker-script objects (not regular objects)
	objectExt = ".o"       // relocatable object files
	dbgExt    = ".dbg"     // unikernel images with debug symbols
)
// Unikernels mirrors the top-level JSON document: a list of unikernel
// configurations under the "unikernels" key.
type Unikernels struct {
	Unikernel []Unikernel `json:"unikernels"`
}

// Unikernel describes one unikernel to analyse: where its build
// artefacts live and which displays/computations to run on it.
// The json tags mirror the configuration file read by ReadJsonFile.
type Unikernel struct {
	BuildPath            string   `json:"buildPath"`     // folder with build artefacts
	Kernel               string   `json:"kernel"`        // explicit ELF image (optional)
	SplitSections        []string `json:"splitSections"` // sections passed to the analysis scripts
	DisplayMapping       bool     `json:"displayMapping"`
	DisplayStatSize      bool     `json:"displayStatSize"`
	ComputeLibsMapping   bool     `json:"computeLibsMapping"`
	IgnoredPlats         []string `json:"ignoredPlats"`   // platform objects to skip
	DisplayElfFile       []string `json:"displayElfFile"` // views for DisplayElfInfo
	DisplaySectionInfo   []string `json:"displaySectionInfo"`
	FindSectionByAddress []string `json:"findSectionByAddress"`
	CompareGroup         int      `json:"compareGroup"`
	// Used to generate new link.lds file
	ComputeTextAddr string `json:"computeTextAddr"`
	// NOTE(review): tag starts with an upper-case letter unlike all
	// other tags — confirm the JSON files really use "LibsMapping".
	LibsMapping []string `json:"LibsMapping"`

	// Runtime state filled while analysing (not part of the JSON input).
	ElfFile  *elf64core.ELF64File
	ListObjs []*elf64core.ELF64File
	Analyser *elf64analyser.ElfAnalyser
}
// ReadJsonFile loads a JSON description of unikernels from path and
// returns one *Unikernel per entry of the "unikernels" array.
//
// It returns an error if the file cannot be opened, read or parsed.
func ReadJsonFile(path string) ([]*Unikernel, error) {
	jsonFile, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer jsonFile.Close()

	// Fix: the read error was previously discarded with `_`.
	byteValue, err := ioutil.ReadAll(jsonFile)
	if err != nil {
		return nil, err
	}

	unikernels := new(Unikernels)
	if err := json.Unmarshal(byteValue, unikernels); err != nil {
		return nil, err
	}

	// Return pointers into the decoded slice (avoids copying each struct).
	uks := make([]*Unikernel, len(unikernels.Unikernel))
	for i := range unikernels.Unikernel {
		uks[i] = &unikernels.Unikernel[i]
	}
	return uks, nil
}
// parseFile parses the ELF file `name` located under `path` and returns
// its in-memory representation.
//
// It returns an error if parsing fails.
func parseFile(path, name string) (*elf64core.ELF64File, error) {
	// Fix: collapsed the redundant var-declaration-then-assignment pair
	// into a single short declaration.
	elfFile := new(elf64core.ELF64File)
	if err := elfFile.ParseAll(path, name); err != nil {
		return nil, err
	}
	return elfFile, nil
}
// GetKernel parses uk.Kernel as an ELF file and stores the result in
// uk.ElfFile. It returns an error if parsing fails.
func (uk *Unikernel) GetKernel() error {
	elfFile, err := parseFile("", uk.Kernel)
	if err != nil {
		return err
	}
	uk.ElfFile = elfFile
	return nil
}
// GetFiles scans uk.BuildPath and parses every relocatable object file
// (*.o, excluding linker-script objects and ignored platforms) into
// uk.ListObjs, plus the first *.dbg executable found (or uk.Kernel when
// set) into uk.ElfFile.
//
// It returns an error when a file cannot be parsed or when no
// executable is found in the folder.
func (uk *Unikernel) GetFiles() error {
	files, err := ioutil.ReadDir(uk.BuildPath)
	if err != nil {
		return err
	}

	uk.ListObjs = make([]*elf64core.ELF64File, 0)
	foundExec := false
	for _, f := range files {
		// Skip sub-directories, Makefiles and config files.
		if f.IsDir() || strings.Contains(f.Name(), makefile) ||
			strings.Contains(f.Name(), config) {
			continue
		}
		if filepath.Ext(f.Name()) == objectExt && !strings.Contains(f.Name(), ldExt) &&
			!u.StringInSlice(f.Name(), uk.IgnoredPlats) {
			// Regular object file: parse and collect it.
			objFile, err := parseFile(uk.BuildPath, f.Name())
			if err != nil {
				return err
			}
			uk.ListObjs = append(uk.ListObjs, objFile)
		} else if filepath.Ext(strings.TrimSpace(f.Name())) == dbgExt &&
			!u.StringInSlice(f.Name(), uk.IgnoredPlats) && !foundExec {
			// Debug executable: only the first match is used, unless an
			// explicit kernel name overrides the discovered file.
			execName := f.Name()
			if len(uk.Kernel) > 0 {
				execName = uk.Kernel
			}
			uk.ElfFile, err = parseFile(uk.BuildPath, execName)
			if err != nil {
				return err
			}
			foundExec = true
		}
	}

	// NOTE(review): if Kernel is set but no .dbg file was found in the
	// folder, uk.ElfFile is still nil here and this dereference panics
	// before the nil check below — confirm and guard if needed.
	if len(uk.Kernel) > 0 {
		u.PrintInfo("Use specified ELF file: " + uk.ElfFile.Name + "(" + uk.BuildPath + ")")
	} else if uk.ElfFile != nil {
		u.PrintInfo("Use ELF file found in build folder: " + uk.ElfFile.Name)
	}

	if uk.ElfFile == nil {
		return errors.New("impossible to find executable in the given folder: " + uk.BuildPath)
	}
	return nil
}
// displayAllElfInfo prints every available view of the parsed ELF file:
// header, sections, relocations, symbols, dynamic entries, program
// headers, segment/section mapping, notes and function tables.
func (uk *Unikernel) displayAllElfInfo() {
	uk.ElfFile.Header.DisplayHeader()
	uk.ElfFile.SectionsTable.DisplaySections()
	uk.ElfFile.DisplayRelocationTables()
	uk.ElfFile.DisplaySymbolsTables()
	uk.ElfFile.DynamicTable.DisplayDynamicEntries()
	uk.ElfFile.SegmentsTable.DisplayProgramHeader()
	uk.ElfFile.SegmentsTable.DisplaySegmentSectionMapping()
	uk.ElfFile.DisplayNotes()
	uk.ElfFile.DisplayFunctionsTables(false)
}
// DisplayElfInfo prints the ELF views requested in uk.DisplayElfFile.
// The single value "all" prints every view; otherwise each entry selects
// one view (header, sections, relocations, symbols, dynamics, segments,
// mapping, notes, functions). Unknown entries produce a warning.
func (uk *Unikernel) DisplayElfInfo() {
	if len(uk.DisplayElfFile) == 1 && uk.DisplayElfFile[0] == "all" {
		uk.displayAllElfInfo()
		return
	}
	for _, d := range uk.DisplayElfFile {
		// Idiom: a switch is clearer than the previous if/else-if chain.
		switch d {
		case "header":
			uk.ElfFile.Header.DisplayHeader()
		case "sections":
			uk.ElfFile.SectionsTable.DisplaySections()
		case "relocations":
			uk.ElfFile.DisplayRelocationTables()
		case "symbols":
			uk.ElfFile.DisplaySymbolsTables()
		case "dynamics":
			uk.ElfFile.DynamicTable.DisplayDynamicEntries()
		case "segments":
			uk.ElfFile.SegmentsTable.DisplayProgramHeader()
		case "mapping":
			uk.ElfFile.SegmentsTable.DisplaySegmentSectionMapping()
		case "notes":
			uk.ElfFile.DisplayNotes()
		case "functions":
			uk.ElfFile.DisplayFunctionsTables(false)
		default:
			u.PrintWarning("No display configuration found for argument: " + d)
		}
	}
}

View file

@ -13,10 +13,11 @@ import (
)
const (
programArg = "program"
unikraftArg = "unikraft"
sourcesArg = "sources"
makefileArg = "makefile"
programArg = "program"
workspaceArg = "workspace"
sourcesArg = "sources"
objsArg = "objects"
makefileArg = "makefile"
)
// ParseArguments parses arguments of the application.
@ -27,11 +28,14 @@ func parseLocalArguments(p *argparse.Parser, args *u.Arguments) error {
args.InitArgParse(p, args, u.STRING, "p", programArg,
&argparse.Options{Required: true, Help: "Program name"})
args.InitArgParse(p, args, u.STRING, "u", unikraftArg,
&argparse.Options{Required: false, Help: "Unikraft Path"})
args.InitArgParse(p, args, u.STRING, "u", workspaceArg,
&argparse.Options{Required: false, Help: "Workspace Path"})
args.InitArgParse(p, args, u.STRING, "s", sourcesArg,
&argparse.Options{Required: true, Help: "App Sources " +
"Folder"})
args.InitArgParse(p, args, u.BOOL, "o", objsArg,
&argparse.Options{Required: false, Default: false, Help: "Add objects from external build system " +
"Folder"})
args.InitArgParse(p, args, u.STRING, "m", makefileArg,
&argparse.Options{Required: false, Help: "Add additional properties " +
"for Makefile"})

View file

@ -160,10 +160,10 @@ func updateConfig(kConfigMap map[string]*KConfig,
{"CONFIG_HAVE_BOOTENTRY", &v, configLine},
{"CONFIG_HAVE_SCHED", &v, configLine},
{"CONFIG_LIBUKARGPARSE", &v, configLine},
{"CONFIG_LIBUKCPIO", nil, commentedConfigLine},
{"CONFIG_LIBUKBUS", &v, configLine},
{"CONFIG_LIBUKSGLIST", &v, configLine},
{"CONFIG_LIBUKTIMECONV", &v, configLine},
// CONFIG build
{"CONFIG_OPTIMIZE_NONE", &v, configLine},
{"CONFIG_OPTIMIZE_PERF", nil, commentedConfigLine},
@ -204,9 +204,29 @@ func SetConfig(newConfigs []*KConfig, kConfigMap map[string]*KConfig,
// It returns a list of KConfig.
func matchLibsKconfig(conf string, kConfigMap map[string]*KConfig,
items []*KConfig, matchedLibs []string) []*KConfig {
v := "y"
switch conf {
case "CONFIG_LIBUKMMAP":
if u.Contains(matchedLibs, UKMMAP) {
configs := []*KConfig{
{"CONFIG_LIBUKMMAP", &v, configLine},
}
items = SetConfig(configs, kConfigMap, items)
}
case "CONFIG_LIBUKSIGNAL":
if u.Contains(matchedLibs, UKSIGNAL) {
configs := []*KConfig{
{"CONFIG_LIBUKSIGNAL", &v, configLine},
}
items = SetConfig(configs, kConfigMap, items)
}
case "CONFIG_LIBPOSIX_LIBDL":
if u.Contains(matchedLibs, POSIXLIBDL) {
configs := []*KConfig{
{"CONFIG_LIBPOSIX_LIBDL", &v, configLine},
}
items = SetConfig(configs, kConfigMap, items)
}
case "CONFIG_LIBPOSIX_PROCESS":
if u.Contains(matchedLibs, POSIXPROCESS) {
configs := []*KConfig{
@ -214,6 +234,13 @@ func matchLibsKconfig(conf string, kConfigMap map[string]*KConfig,
}
items = SetConfig(configs, kConfigMap, items)
}
case "CONFIG_LIBPOSIX_SYSINFO":
if u.Contains(matchedLibs, POSIXSYSINFO) {
configs := []*KConfig{
{"CONFIG_LIBPOSIX_SYSINFO", &v, configLine},
}
items = SetConfig(configs, kConfigMap, items)
}
case "CONFIG_LIBPOSIX_USER":
if u.Contains(matchedLibs, POSIXUSER) {
configs := []*KConfig{
@ -256,6 +283,9 @@ func matchLibsKconfig(conf string, kConfigMap map[string]*KConfig,
{"CONFIG_LIBVFSCORE", &v, configLine},
{"CONFIG_LIBRAMFS", nil, commentedConfigLine},
{"CONFIG_LIBDEVFS", &v, configLine},
{"CONFIG_LIBDEVFS_DEV_NULL", nil, commentedConfigLine},
{"CONFIG_LIBDEVFS_DEV_ZERO", nil, commentedConfigLine},
{"CONFIG_LIBDEVFS_DEV_STDOUT", nil, commentedConfigLine},
{"CONFIG_LIBDEVFS_USE_RAMFS", nil, commentedConfigLine},
{"#", nil, separatorLine},
{"# vfscore configuration", nil, headerLine},
@ -273,6 +303,7 @@ func matchLibsKconfig(conf string, kConfigMap map[string]*KConfig,
{"CONFIG_LIBNEWLIBM", &v, configLine},
{"CONFIG_LIBNEWLIBC_WANT_IO_C99_FORMATS", nil, commentedConfigLine},
{"CONFIG_LIBNEWLIBC_LINUX_ERRNO_EXTENSIONS", nil, commentedConfigLine},
{"CONFIG_LIBNEWLIBC_CRYPT", &v, configLine},
}
items = SetConfig(configs, kConfigMap, items)
}
@ -348,21 +379,22 @@ func matchLibsKconfig(conf string, kConfigMap map[string]*KConfig,
func addInternalConfig(conf string, kConfigMap map[string]*KConfig,
items []*KConfig) []*KConfig {
v := "y"
n := "n"
switch conf {
case "CONFIG_PLAT_XEN":
configs := []*KConfig{
{"CONFIG_PLAT_XEN", &v, configLine},
{"CONFIG_PLAT_XEN", &n, configLine},
{"CONFIG_XEN_HVMLITE", nil, commentedConfigLine},
{"", nil, lineFeed},
{"#", nil, separatorLine},
{"# Console Options", nil, headerLine},
{"#", nil, separatorLine},
{"CONFIG_XEN_KERNEL_HV_CONSOLE", &v, configLine},
{"CONFIG_XEN_KERNEL_HV_CONSOLE", &n, configLine},
{"CONFIG_XEN_KERNEL_EMG_CONSOLE", nil, commentedConfigLine},
{"CONFIG_XEN_DEBUG_HV_CONSOLE", &v, configLine},
{"CONFIG_XEN_DEBUG_HV_CONSOLE", &n, configLine},
{"CONFIG_XEN_DEBUG_EMG_CONSOLE", nil, commentedConfigLine},
{"CONFIG_XEN_PV_BUILD_P2M", &v, configLine},
{"CONFIG_XEN_GNTTAB", &v, configLine},
{"CONFIG_XEN_PV_BUILD_P2M", &n, configLine},
{"CONFIG_XEN_GNTTAB", &n, configLine},
{"CONFIG_XEN_XENBUS", nil, commentedConfigLine},
}
items = SetConfig(configs, kConfigMap, items)
@ -377,6 +409,10 @@ func addInternalConfig(conf string, kConfigMap map[string]*KConfig,
{"CONFIG_KVM_KERNEL_VGA_CONSOLE", &v, configLine},
{"CONFIG_KVM_DEBUG_SERIAL_CONSOLE", &v, configLine},
{"CONFIG_KVM_DEBUG_VGA_CONSOLE", &v, configLine},
{"CONFIG_KVM_SERIAL_BAUD_115200", &v, configLine},
{"CONFIG_KVM_SERIAL_BAUD_57600", nil, commentedConfigLine},
{"CONFIG_KVM_SERIAL_BAUD_38400", nil, commentedConfigLine},
{"CONFIG_KVM_SERIAL_BAUD_19200", nil, commentedConfigLine},
{"CONFIG_KVM_PCI", &v, configLine},
{"CONFIG_VIRTIO_BUS", &v, configLine},
{"", nil, lineFeed},
@ -398,9 +434,23 @@ func addInternalConfig(conf string, kConfigMap map[string]*KConfig,
var number = "60"
configs := []*KConfig{
{"CONFIG_LIBUKBOOT", &v, configLine},
{"CONFIG_LIBUKBOOT_BANNER", &v, configLine},
{"CONFIG_LIBUKBOOT_BANNER_NONE", nil, commentedConfigLine},
{"CONFIG_LIBUKBOOT_BANNER_MINIMAL", nil, commentedConfigLine},
{"CONFIG_LIBUKBOOT_BANNER_CLASSIC", nil, commentedConfigLine},
{"CONFIG_LIBUKBOOT_BANNER_POWEREDBY", &v, configLine},
{"CONFIG_LIBUKBOOT_BANNER_POWEREDBY_ANSI", nil, commentedConfigLine},
{"CONFIG_LIBUKBOOT_BANNER_POWEREDBY_ANSI2", nil, commentedConfigLine},
{"CONFIG_LIBUKBOOT_BANNER_POWEREDBY_EA", nil, commentedConfigLine},
{"CONFIG_LIBUKBOOT_BANNER_POWEREDBY_EAANSI", nil, commentedConfigLine},
{"CONFIG_LIBUKBOOT_BANNER_POWEREDBY_EAANSI2", nil, commentedConfigLine},
{"CONFIG_LIBUKBOOT_BANNER_POWEREDBY_U8", nil, commentedConfigLine},
{"CONFIG_LIBUKBOOT_BANNER_POWEREDBY_U8ANSI", nil, commentedConfigLine},
{"CONFIG_LIBUKBOOT_BANNER_POWEREDBY_U8ANSI2", nil, commentedConfigLine},
{"CONFIG_LIBUKBOOT_MAXNBARGS", &number, configLine},
{"CONFIG_LIBUKBOOT_INITALLOC", &v, configLine},
{"CONFIG_LIBUKBOOT_INITBBUDDY", &v, configLine},
{"CONFIG_LIBUKBOOT_INITREGION", nil, commentedConfigLine},
{"CONFIG_LIBUKBOOT_NOALLOC", nil, commentedConfigLine},
{"CONFIG_LIBUKDEBUG", &v, configLine},
{"CONFIG_LIBUKDEBUG_PRINTK", &v, configLine},
{"CONFIG_LIBUKDEBUG_PRINTK_INFO", &v, configLine},
@ -414,6 +464,8 @@ func addInternalConfig(conf string, kConfigMap map[string]*KConfig,
{"CONFIG_LIBUKDEBUG_REDIR_PRINTK", nil, commentedConfigLine},
{"CONFIG_LIBUKDEBUG_PRINT_TIME", nil, commentedConfigLine},
{"CONFIG_LIBUKDEBUG_PRINT_STACK", nil, commentedConfigLine},
{"CONFIG_LIBUKDEBUG_PRINT_SRCNAME", &v, configLine},
{"CONFIG_LIBUKDEBUG_ANSI_COLOR", &v, configLine},
{"CONFIG_LIBUKDEBUG_ENABLE_ASSERT", nil, commentedConfigLine},
{"CONFIG_LIBUKDEBUG_TRACEPOINTS", nil, commentedConfigLine},
}
@ -427,6 +479,7 @@ func addInternalConfig(conf string, kConfigMap map[string]*KConfig,
case "CONFIG_LIBUKALLOC":
configs := []*KConfig{
{"CONFIG_LIBUKALLOC", &v, configLine},
{"CONFIG_LIBUKALLOC_IFMALLOC", nil, commentedConfigLine},
{"CONFIG_LIBUKALLOC_IFPAGES", &v, configLine},
{"CONFIG_LIBUKALLOC_IFSTATS", nil, commentedConfigLine},
{"CONFIG_LIBUKALLOCBBUDDY", &v, configLine},
@ -462,6 +515,7 @@ func addInternalConfig(conf string, kConfigMap map[string]*KConfig,
{"CONFIG_LIBUKLOCK", &v, configLine},
{"CONFIG_LIBUKLOCK_SEMAPHORE", &v, configLine},
{"CONFIG_LIBUKLOCK_MUTEX", &v, configLine},
{"CONFIG_LIBUKLOCK_MUTEX_METRICS", nil, commentedConfigLine},
}
items = SetConfig(configs, kConfigMap, items)
}

View file

@ -30,19 +30,20 @@ const (
LIBUUID = "libuuid"
LIBUV = "libuv"
LIBV8 = "libv8"
LWIP = "lwip"
LWIP = "lib-lwip"
MICROPYTHON = "micropython"
MUSL = "musl"
NEWLIB = "newlib"
MUSL = "lib-musl"
NEWLIB = "lib-newlib"
NOBLIM = "noblim"
NOLIBC = "nolibc"
OPENSSL = "openssl"
OPENSSL = "lib-openssl"
PFS9 = "9pfs"
POSIXLIBDL = "posix-libdl"
POSIXPROCESS = "posix-process"
POSIXUSER = "posix-user"
PTHREADEMBEDDED = "pthread-embedded"
PTHREADPOOL = "pthreadpool"
POSIXLIBDL = "posix_libdl"
POSIXPROCESS = "posix_process"
POSIXUSER = "posix_user"
POSIXSYSINFO = "posix_sysinfo"
PTHREADEMBEDDED = "lib-pthread-embedded"
PTHREADPOOL = "lib-pthreadpool"
PYTHON = "python"
RAMFS = "ramfs"
SYSCALLSHIM = "syscallshim"
@ -54,6 +55,8 @@ const (
UKDEBUG = "ukdebug"
UKLOCK = "uklock"
UKMPI = "ukmpi"
UKMMAP = "ukmmap"
UKSIGNAL = "uksignal"
UKNETDEV = "uknetdev"
UKPCI = "ukpci"
UKSCHED = "uksched"
@ -64,5 +67,5 @@ const (
UKTIMECONV = "uktimeconv"
UKTIME = "uktime"
VFSCORE = "vfscore"
ZLIB = "zlib"
ZLIB = "lib-zlib"
)

View file

@ -7,18 +7,37 @@
package buildtool
import (
"encoding/json"
"io/ioutil"
"os"
"path"
"path/filepath"
"strings"
"sync"
u "tools/srcs/common"
)
const (
exportFile = "exportsyms.uk"
prefixUrl = "http://xenbits.xen.org/gitweb/?p=unikraft/libs/"
suffixUrl = ";a=blob_plain;f=exportsyms.uk;hb=refs/heads/staging"
JSON = ".json"
prefixUrl = "https://github.com/unikraft/"
)
type MicroLibFile struct {
Filename string
IsInternal bool
Functions []MicroLibsFunction `json:"functions"`
}
type MicroLibsFunction struct {
Name string `json:"name"`
ReturnValue string `json:"return_value"`
FullyQualified string `json:"fully_qualified"`
ArgsName []string `json:"args_name"`
ArgsType []string `json:"args_type"`
Headers []string `json:"headers"`
NbArgs int `json:"nb_args"`
Usage int `json:"usage"`
}
// -----------------------------Match micro-libs--------------------------------
// processSymbols adds symbols within the 'exportsyms.uk' file into a map.
@ -38,58 +57,79 @@ func processSymbols(microLib, output string, mapSymbols map[string][]string) {
// from Unikraft's internal libs and add them into a map.
//
// It returns an error if any, otherwise it returns nil.
func fetchSymbolsInternalLibs(unikraftLibs string,
func fetchSymbolsInternalLibs(folder string,
microLibs map[string][]string) error {
// Read files within the Unikraft directory
files, err := ioutil.ReadDir(unikraftLibs)
files, err := ioutil.ReadDir(folder)
if err != nil {
return err
}
// Read Unikraft internal libs symbols (exportsyms.uk)
for _, f := range files {
if f.IsDir() {
export := unikraftLibs + f.Name() + u.SEP + exportFile
if exists, _ := u.Exists(export); exists {
u.PrintInfo("Retrieving symbols of internal lib: " + f.Name())
b, _ := u.OpenTextFile(export)
processSymbols(f.Name(), string(b), microLibs)
for _, file := range files {
if filepath.Ext(file.Name()) == JSON {
microLibFile, err := readMicroLibJson(path.Join(folder, file.Name()))
microLibFile.IsInternal = true
if err != nil {
return err
}
libName := strings.Replace(file.Name(), JSON, "", -1)
u.PrintInfo("Retrieving symbols of internal lib: " + libName)
for _, functions := range microLibFile.Functions {
microLibs[functions.Name] = append(microLibs[functions.Name], libName)
}
}
}
return nil
}
// fetchSymbolsExternalLibs fetches all symbols within 'exportsyms.uk' files
// from Unikraft's external libs and add them into a map.
// readMicroLibJson reads symbols from external microlibs stored in json files.
//
// It returns a list of MicroLibFile and an error if any, otherwise it returns nil.
// readMicroLibJson reads the symbols of a micro-lib stored in a json file.
//
// It returns a pointer to a MicroLibFile and an error if any, otherwise it
// returns nil.
func readMicroLibJson(filename string) (*MicroLibFile, error) {
	jsonFile, err := os.Open(filename)
	if err != nil {
		return nil, err
	}
	defer jsonFile.Close()

	// Propagate read errors instead of unmarshalling possibly-partial data
	// (the original silently discarded this error).
	byteValue, err := ioutil.ReadAll(jsonFile)
	if err != nil {
		return nil, err
	}

	// Filename is the lib name: base name with the '.json' extension removed.
	functions := &MicroLibFile{Filename: filepath.Base(strings.Replace(filename, JSON, "", -1))}
	// Unmarshal into the pointer directly (no need for a **MicroLibFile).
	if err := json.Unmarshal(byteValue, functions); err != nil {
		return nil, err
	}
	return functions, nil
}
// fetchSymbolsExternalLibs fetches all symbols files from Unikraft's external libs
// and add them into a map.
//
// It returns a list of symbols and an error if any, otherwise it returns nil.
func fetchSymbolsExternalLibs(url string,
func fetchSymbolsExternalLibs(folder string,
microLibs map[string][]string) (map[string]string, error) {
var externalLibs map[string]string
if body, err := u.DownloadFile(url); err != nil {
files, err := ioutil.ReadDir(folder)
if err != nil {
return nil, err
} else {
externalLibs = u.GitFindExternalLibs(*body)
}
var wg sync.WaitGroup
wg.Add(len(externalLibs))
// Iterate through all external libs to parse 'exportsyms.uk' file
for lib, git := range externalLibs {
// Use go routine to get better efficiency
go func(lib, git string, microLibs map[string][]string) {
defer wg.Done()
u.PrintInfo("Retrieving symbols of external lib: " + lib)
if symbols, err := u.DownloadFile(prefixUrl + git + suffixUrl); err != nil {
u.PrintWarning(err)
} else {
processSymbols(lib, *symbols, microLibs)
}
}(lib, git, microLibs)
externalLibs := make(map[string]string, len(files))
for _, file := range files {
if filepath.Ext(file.Name()) == JSON {
microLibFile, err := readMicroLibJson(path.Join(folder, file.Name()))
microLibFile.IsInternal = false
if err != nil {
return nil, err
}
libName := strings.Replace(file.Name(), JSON, "", -1)
u.PrintInfo("Retrieving symbols of external lib: " + libName)
for _, functions := range microLibFile.Functions {
microLibs[functions.Name] = append(microLibs[functions.Name], libName)
}
externalLibs[libName] = prefixUrl + libName + ".git"
}
wg.Wait()
}
return externalLibs, nil
}
@ -131,13 +171,20 @@ func matchLibs(unikraftLibs string, data *u.Data) ([]string, map[string]string,
mapSymbols := make(map[string][]string)
matchedLibs := make([]string, 0)
if err := fetchSymbolsInternalLibs(unikraftLibs, mapSymbols); err != nil {
//todo remove
matchedLibs = append(matchedLibs, POSIXLIBDL)
matchedLibs = append(matchedLibs, POSIXSYSINFO)
matchedLibs = append(matchedLibs, UKMMAP)
folder := filepath.Join(os.Getenv("GOPATH"), "src", "tools", "libs", "internal")
if err := fetchSymbolsInternalLibs(folder, mapSymbols); err != nil {
return nil, nil, err
}
// Get list of libs from xenbits
url := "http://xenbits.xen.org/gitweb/?pf=unikraft/libs"
externalLibs, err := fetchSymbolsExternalLibs(url, mapSymbols)
// Get list of libs from libs/external
folder = filepath.Join(os.Getenv("GOPATH"), "src", "tools", "libs", "external")
externalLibs, err := fetchSymbolsExternalLibs(folder, mapSymbols)
if err != nil {
return nil, nil, err
}
@ -154,10 +201,10 @@ func matchLibs(unikraftLibs string, data *u.Data) ([]string, map[string]string,
// -----------------------------Clone micro-libs--------------------------------
// cloneGitRepo clones a specific git repository that hosts an external
// micro-libs on http://xenbits.xen.org/
// micro-libs on http://github.com/
//
// It returns an error if any, otherwise it returns nil.
func cloneGitRepo(url, unikraftPathLibs string) error {
func cloneGitRepo(url, unikraftPathLibs, lib string) error {
u.PrintInfo("Clone git repository " + url)
if _, _, err := u.GitCloneRepository(url, unikraftPathLibs, true); err != nil {
@ -167,7 +214,7 @@ func cloneGitRepo(url, unikraftPathLibs string) error {
unikraftPathLibs)
u.PrintInfo("Git branch " + url)
if _, _, err := u.GitBranchStaging(unikraftPathLibs, true); err != nil {
if _, _, err := u.GitBranchStaging(unikraftPathLibs+lib, false); err != nil {
return err
}
@ -177,21 +224,20 @@ func cloneGitRepo(url, unikraftPathLibs string) error {
// cloneLibsFolders clones all the needed micro-libs that are needed by a
// given application
//
func cloneLibsFolders(unikraftPath string, matchedLibs []string,
func cloneLibsFolders(workspacePath string, matchedLibs []string,
externalLibs map[string]string) {
for _, lib := range matchedLibs {
if _, ok := externalLibs[lib]; ok {
exists, _ := u.Exists(unikraftPath + u.LIBSFOLDER + lib)
if value, ok := externalLibs[lib]; ok {
exists, _ := u.Exists(workspacePath + u.LIBSFOLDER + lib)
if !exists {
// If the micro-libs is not in the local host, clone it
if err := cloneGitRepo("git://xenbits.xen.org/unikraft/"+
"libs/"+lib+".git", unikraftPath+ u.LIBSFOLDER); err != nil {
if err := cloneGitRepo(value, workspacePath+u.LIBSFOLDER, lib); err != nil {
u.PrintWarning(err)
}
} else {
u.PrintInfo("Library " + lib + " already exists in folder" +
unikraftPath + u.LIBSFOLDER)
workspacePath + u.LIBSFOLDER)
}
}
}

View file

@ -13,7 +13,6 @@ import (
"path/filepath"
"regexp"
"strings"
u "tools/srcs/common"
)
@ -25,6 +24,7 @@ const (
)
const pageSize = 10
// -----------------------------Generate Config---------------------------------
// generateConfigUk generates a 'Config.uk' file for the Unikraft build system.
@ -112,7 +112,8 @@ func RunBuildTool(homeDir string, data *u.Data) {
// Init and parse local arguments
args := new(u.Arguments)
p, err := args.InitArguments()
p, err := args.InitArguments("--build",
"The Build tool allows to help developers to port an app as unikernel")
if err != nil {
u.PrintErr(err)
}
@ -128,15 +129,20 @@ func RunBuildTool(homeDir string, data *u.Data) {
programName = filepath.Base(programName)
}
var unikraftPath string
if len(*args.StringArg[unikraftArg]) == 0 {
path, err := setUnikraftFolder(homeDir + u.SEP)
var workspacePath = homeDir + u.SEP + u.WORKSPACEFOLDER
unikraftPath := workspacePath + u.UNIKRAFTFOLDER
if len(*args.StringArg[workspaceArg]) > 0 {
workspacePath = *args.StringArg[workspaceArg]
}
// Create workspace folder
if _, err := os.Stat(workspacePath); os.IsNotExist(err) {
err = setWorkspaceFolder(workspacePath)
if err != nil {
u.PrintErr(err)
}
unikraftPath = *path
} else {
unikraftPath = *args.StringArg[unikraftArg]
u.PrintInfo("Workspace folder already exists")
}
// Check if sources argument is set
@ -145,14 +151,13 @@ func RunBuildTool(homeDir string, data *u.Data) {
}
// Check if the unikraft folder contains the 3 required folders
if _, err := ioutil.ReadDir(unikraftPath); err != nil {
if _, err := ioutil.ReadDir(workspacePath); err != nil {
u.PrintErr(err)
} else {
path, err := setUnikraftSubFolders(homeDir + u.SEP + u.UNIKRAFTFOLDER)
err := setUnikraftSubFolders(workspacePath)
if err != nil {
u.PrintErr(err)
}
unikraftPath = *path
}
// If data is not initialized, read output from dependency analysis tool
@ -165,7 +170,7 @@ func RunBuildTool(homeDir string, data *u.Data) {
}
// Create unikraft application path
appFolderPtr, err := createUnikraftApp(programName, unikraftPath)
appFolderPtr, err := createUnikraftApp(programName, workspacePath)
if err != nil {
u.PrintErr(err)
}
@ -207,17 +212,16 @@ func RunBuildTool(homeDir string, data *u.Data) {
}
// Match micro-libs
matchedLibs, externalLibs, err := matchLibs(unikraftPath+"unikraft"+u.SEP+
"lib"+u.SEP, data)
matchedLibs, externalLibs, err := matchLibs(unikraftPath+"lib"+u.SEP, data)
if err != nil {
u.PrintErr(err)
}
// Clone the external git repositories
cloneLibsFolders(unikraftPath, matchedLibs, externalLibs)
cloneLibsFolders(workspacePath, matchedLibs, externalLibs)
// Match internal dependencies between micro-libs
if err := searchInternalDependencies(unikraftPath, &matchedLibs,
if err := searchInternalDependencies(workspacePath, &matchedLibs,
externalLibs); err != nil {
u.PrintErr(err)
}
@ -227,10 +231,10 @@ func RunBuildTool(homeDir string, data *u.Data) {
}
// Clone the external git repositories (if changed)
cloneLibsFolders(unikraftPath, matchedLibs, externalLibs)
cloneLibsFolders(workspacePath, matchedLibs, externalLibs)
// Generate Makefiles
if err := generateMake(programName, appFolder, unikraftPath, *args.StringArg[makefileArg],
if err := generateMake(programName, appFolder, workspacePath, *args.StringArg[makefileArg],
matchedLibs, selectedFiles, externalLibs); err != nil {
u.PrintErr(err)
}
@ -287,10 +291,10 @@ func searchInternalDependencies(unikraftPath string, matchedLibs *[]string,
return nil
}
func generateMake(programName, appFolder, unikraftPath, makefile string,
func generateMake(programName, appFolder, workspacePath, makefile string,
matchedLibs, sourceFiles []string, externalLibs map[string]string) error {
// Generate Makefile
if err := generateMakefile(appFolder+"Makefile", unikraftPath,
if err := generateMakefile(appFolder+"Makefile", workspacePath,
appFolder, matchedLibs, externalLibs); err != nil {
return err
}
@ -332,8 +336,7 @@ func deleteBuildFolder(appFolder string) {
func initConfig(appFolder string, matchedLibs []string) {
// Run make allNoConfig to generate a .config file
if strOut, strErr, err := u.ExecuteWaitCommand(appFolder, "make",
"allnoconfig"); err != nil {
if strOut, strErr, err := u.ExecuteWaitCommand(appFolder, "make", "allnoconfig"); err != nil {
u.PrintErr(err)
} else if len(*strErr) > 0 {
u.PrintErr("error during generating .config: " + *strErr)
@ -406,6 +409,6 @@ func runMake(programName, appFolder string) {
if state == compilerError {
u.PrintErr("Fix compilation errors")
} else if state == success {
u.PrintOk("Unikernel created in Folder: 'build/'")
u.PrintOk("Unikernel created in Folder: " + appFolder)
}
}

View file

@ -29,51 +29,44 @@ func createIncludeFolder(appFolder string) (*string, error) {
return &includeFolder, nil
}
// ----------------------------Set UNIKRAFT Folders-----------------------------
func setUnikraftFolder(homeDir string) (*string, error) {
// ----------------------------Set Workspace Folders-----------------------------
func setWorkspaceFolder(workspacePath string) error {
unikraftFolder := homeDir + u.UNIKRAFTFOLDER
created, err := u.CreateFolder(unikraftFolder)
_, err := u.CreateFolder(workspacePath)
if err != nil {
return nil, err
return err
}
if created {
setUnikraftSubFolders(unikraftFolder)
} else {
u.PrintInfo("Unikraft folder already exists")
return &unikraftFolder, nil
}
return &unikraftFolder, nil
return nil
}
func setUnikraftSubFolders(unikraftFolder string) (*string, error) {
func setUnikraftSubFolders(workspaceFolder string) error {
u.PrintInfo("Create Unikraft folder with apps and libs subfolders")
unikraftFolder := workspaceFolder + u.UNIKRAFTFOLDER
u.PrintInfo("Managing Unikraft main folder with apps and libs subfolders")
// Create 'apps' and 'libs' subfolders
if _, err := u.CreateFolder(unikraftFolder + u.APPSFOLDER); err != nil {
return nil, err
if _, err := u.CreateFolder(workspaceFolder + u.APPSFOLDER); err != nil {
return err
}
if _, err := u.CreateFolder(unikraftFolder + u.LIBSFOLDER); err != nil {
return nil, err
if _, err := u.CreateFolder(workspaceFolder + u.LIBSFOLDER); err != nil {
return err
}
// Download git repo of unikraft
if _, _, err := u.GitCloneRepository("git://xenbits.xen.org/unikraft/unikraft.git",
unikraftFolder, true); err != nil {
return nil, err
if _, err := os.Stat(unikraftFolder); os.IsNotExist(err) {
url := "https://github.com/unikraft/unikraft.git"
// Download git repo of unikraft
if _, _, err := u.GitCloneRepository(url, workspaceFolder, true); err != nil {
return err
}
}
// Use staging branch
if _, _, err := u.GitBranchStaging(unikraftFolder+"unikraft", true); err != nil {
return nil, err
if _, _, err := u.GitBranchStaging(unikraftFolder, false); err != nil {
return err
}
return &unikraftFolder, nil
return nil
}
// ---------------------------Check UNIKRAFT Folder-----------------------------
@ -100,13 +93,13 @@ func containsUnikraftFolders(files []os.FileInfo) bool {
// ---------------------------UNIKRAFT APP FOLDER-------------------------------
func createUnikraftApp(programName, unikraftPath string) (*string, error) {
func createUnikraftApp(programName, workspacePath string) (*string, error) {
var appFolder string
if unikraftPath[len(unikraftPath)-1] != os.PathSeparator {
appFolder = unikraftPath + u.SEP + u.APPSFOLDER + programName + u.SEP
if workspacePath[len(workspacePath)-1] != os.PathSeparator {
appFolder = workspacePath + u.SEP + u.APPSFOLDER + programName + u.SEP
} else {
appFolder = unikraftPath + u.APPSFOLDER + programName + u.SEP
appFolder = workspacePath + u.APPSFOLDER + programName + u.SEP
}
created, err := u.CreateFolder(appFolder)

View file

@ -22,12 +22,14 @@ const (
// Exported constants to determine which tool is used.
const (
CRAWLER = "crawler"
DEP = "dep"
BUILD = "build"
VERIF = "verif"
PERF = "perf"
BINARY = "binary"
CRAWLER = "crawler"
DEP = "dep"
BUILD = "build"
VERIF = "verif"
PERF = "perf"
BINARY = "binary"
ALIGNER = "aligner"
EXTRACTER = "extracter"
)
const (
@ -45,14 +47,13 @@ type Arguments struct {
// arguments.
//
// It returns a parser as well as an error if any, otherwise it returns nil.
func (args *Arguments) InitArguments() (*argparse.Parser, error) {
func (args *Arguments) InitArguments(name, description string) (*argparse.Parser, error) {
args.IntArg = make(map[string]*int)
args.BoolArg = make(map[string]*bool)
args.StringArg = make(map[string]*string)
p := argparse.NewParser("UNICORE toolchain",
"The UNICORE toolchain allows to build unikernels")
p := argparse.NewParser(name, description)
return p, nil
}
@ -88,16 +89,22 @@ func (*Arguments) ParseMainArguments(p *argparse.Parser, args *Arguments) error
Help: "Execute the binary analyser tool"})
args.InitArgParse(p, args, BOOL, "", DEP,
&argparse.Options{Required: false, Default: false,
Help: "Execute only the dependency analysis tool"})
Help: "Execute only the dependency analyser tool"})
args.InitArgParse(p, args, BOOL, "", BUILD,
&argparse.Options{Required: false, Default: false,
Help: "Execute only the automatic build tool"})
Help: "Execute only the semi-automatic build tool"})
args.InitArgParse(p, args, BOOL, "", VERIF,
&argparse.Options{Required: false, Default: false,
Help: "Execute only the verification tool"})
Help: "Execute only the output verification tool"})
args.InitArgParse(p, args, BOOL, "", PERF,
&argparse.Options{Required: false, Default: false,
Help: "Execute only the performance tool"})
args.InitArgParse(p, args, BOOL, "", ALIGNER,
&argparse.Options{Required: false, Default: false,
Help: "Execute only the aligner tool"})
args.InitArgParse(p, args, BOOL, "", EXTRACTER,
&argparse.Options{Required: false, Default: false,
Help: "Execute only the symbols extracter tool"})
// Parse only the two first arguments <program name, [tools]>
if len(os.Args) > 2 {

View file

@ -14,10 +14,10 @@ type Data struct {
// Exported struct that represents data for static dependency analysis.
type StaticData struct {
Dependencies map[string][]string `json:"dependencies"`
SharedLibs map[string][]string `json:"shared_libs"`
SystemCalls map[string]int `json:"system_calls"`
Symbols map[string]string `json:"symbols"`
Dependencies map[string][]string `json:"dependencies"`
}
// Exported struct that represents data for dynamic dependency analysis.

View file

@ -6,12 +6,7 @@
package common
import (
"regexp"
"strings"
)
const branch = "staging"
const branch = "RELEASE-0.7.0"
// GitCloneRepository clones a git repository at the the given url.
//
@ -32,12 +27,12 @@ func GitBranchStaging(dir string, v bool) (*string, *string, error) {
return strOut, strErr, err
}
if strings.Contains(*strOut, branch) || strings.Contains(*strErr, branch) {
PrintInfo("Checkout to " + branch)
return ExecuteRunCmd("git", dir, v, "checkout", branch)
}
//todo review
//if strings.Contains(*strOut, branch) || strings.Contains(*strErr, branch) {
return ExecuteRunCmd("git", dir, v, "checkout", branch)
//}
return strOut, strErr, err
//return strOut, strErr, err
}
// GitPull pulls the current git repository.
@ -47,21 +42,3 @@ func GitBranchStaging(dir string, v bool) (*string, *string, error) {
func GitPull(dir string, v bool) (*string, *string, error) {
return ExecuteRunCmd("git", dir, v, "pull")
}
// GitFindExternalLibs finds all the external libraries of Unikraft which are
// hosted on Xenbits.
//
// It returns a map of all the external libs of Unikraft.
func GitFindExternalLibs(output string) map[string]string {
var re = regexp.MustCompile(
`(?m)<a class="list"\s+href="(.*);a=summary">.*</a>`)
matches := re.FindAllStringSubmatch(output, -1)
externalLibs := make(map[string]string, len(matches))
for _, match := range matches {
git := strings.Split(match[1], "/")
lib := strings.Split(git[len(git)-1], ".git")
externalLibs[lib[0]] = git[len(git)-1]
}
return externalLibs
}

View file

@ -2,11 +2,12 @@ package common
// Exported constants for folder management
const (
APPSFOLDER = "apps" + SEP
UNIKRAFTFOLDER = "unikraft" + SEP
APPSFOLDER = "apps" + SEP
WORKSPACEFOLDER = "workspace" + SEP
UNIKRAFTFOLDER = "unikraft" + SEP
BUILDFOLDER = "build" + SEP
LIBSFOLDER = "libs" + SEP
INCLUDEFOLDER = "include" + SEP
LIBSFOLDER = "libs" + SEP
INCLUDEFOLDER = "include" + SEP
KVM_IMAGE = "_kvm-x86_64"
)

View file

@ -17,6 +17,15 @@ import (
"strings"
)
// StringInSlice reports whether name contains any of the given strings as a
// substring. Despite its name, this is a substring test, not strict slice
// membership: an empty element always matches.
func StringInSlice(name string, plats []string) bool {
	for i := range plats {
		if strings.Contains(name, plats[i]) {
			return true
		}
	}
	return false
}
// Contains checks if a given slice contains a particular string.
//
// It returns true if the given contains the searched string.
@ -161,7 +170,7 @@ func WriteMapToFile(file *os.File, headerName string, in interface{}) error {
// RecordDataJson saves json into a json file named by filename.
//
// It returns an error if any, otherwise it returns nil.
func RecordDataJson(filename string, data *Data) error {
func RecordDataJson(filename string, data interface{}) error {
b, err := json.Marshal(data)
if err != nil {

View file

@ -10,7 +10,6 @@ import (
"os"
"path/filepath"
"strings"
"time"
u "tools/srcs/common"
)
@ -23,7 +22,8 @@ func RunCrawler() {
// Init and parse local arguments
args := new(u.Arguments)
p, err := args.InitArguments()
p, err := args.InitArguments("--crawler",
"The Crawler tool allows to analyse (internal) dependencies between micro-libs")
if err != nil {
u.PrintErr(err)
}
@ -35,6 +35,7 @@ func RunCrawler() {
fullSelect := *args.BoolArg[fullLibsArg]
var path string
var outputFilename = "output_"
if len(*args.StringArg[repoArg]) > 0 {
// Only one folder
path = *args.StringArg[repoArg]
@ -43,6 +44,12 @@ func RunCrawler() {
u.PrintErr()
}
repoName := *args.StringArg[repoArg]
if filepath.IsAbs(*args.StringArg[repoArg]) {
repoName = filepath.Base(*args.StringArg[repoArg])
}
outputFilename += repoName
} else if len(*args.StringArg[libsArg]) > 0 {
// Several folders within a list
@ -59,6 +66,12 @@ func RunCrawler() {
u.PrintErr(err)
}
}
fileLibsName := *args.StringArg[libsArg]
if filepath.IsAbs(*args.StringArg[libsArg]) {
fileLibsName = filepath.Base(*args.StringArg[libsArg])
}
outputFilename += strings.Replace(fileLibsName, ".txt", "", -1)
} else {
u.PrintErr("You must specify either -r (--repository) or -l (libs)")
}
@ -69,8 +82,7 @@ func RunCrawler() {
outFolder += string(os.PathSeparator)
}
outputPath := outFolder +
"output_" + time.Now().Format("20060102150405")
outputPath := outFolder + outputFilename
// Create the dependencies graph
u.GenerateGraph("Unikraft Crawler", outputPath, mapConfig,

View file

@ -32,11 +32,11 @@ const (
// ---------------------------------Read Json-----------------------------------
// readTestFileJson load Testing json from a json file named by filename.
// ReadTestFileJson load Testing json from a json file named by filename.
//
// It returns a Testing structure initialized and an error if any, otherwise it
// returns nil.
func readTestFileJson(filename string) (*Testing, error) {
func ReadTestFileJson(filename string) (*Testing, error) {
testingStruct := &Testing{}
jsonFile, err := os.Open(filename)
@ -73,7 +73,7 @@ func gatherDataAux(command, programPath, programName, option string,
testingStruct := &Testing{}
if len(dArgs.testFile) > 0 {
var err error
testingStruct, err = readTestFileJson(dArgs.testFile)
testingStruct, err = ReadTestFileJson(dArgs.testFile)
if err != nil {
u.PrintWarning("Cannot find test file: " + err.Error())
}

View file

@ -92,6 +92,11 @@ func parseDependencies(output string, data, dependenciesMap,
data[line] = nil
}
}
if len(listDep) == 0 {
listDep = append(listDep, output)
}
return listDep
}

View file

@ -20,7 +20,8 @@ func RunAnalyserTool(homeDir string, data *u.Data) {
// Init and parse local arguments
args := new(u.Arguments)
p, err := args.InitArguments()
p, err := args.InitArguments("--dep",
"The Dependencies analyser allows to extract specific information of a program")
if err != nil {
u.PrintErr(err)
}

View file

@ -119,9 +119,13 @@ func gatherDependencies(programName string, data *u.StaticData, v bool) error {
if len(output) > 0 {
// Parse package name
packageName := parsePackagesName(output)
if len(packageName) > 0 {
return executeDependAptCache(packageName, data, v)
if err := executeDependAptCache(packageName, data, v); err != nil {
u.PrintWarning(err)
}
if _, ok := data.Dependencies[packageName]; !ok {
data.Dependencies[packageName] = []string{""}
}
}
} else {
// Enter manually the name of the package
@ -177,8 +181,10 @@ func executeDependAptCache(programName string, data *u.StaticData,
data.Dependencies = make(map[string][]string)
dependenciesMap := make(map[string][]string)
printDep := make(map[string][]string)
_ = parseDependencies(output, data.Dependencies, dependenciesMap,
printDep, fullDeps, 0)
printDep, fullDeps, 5)
}
fmt.Println("----------------------------------------------")

View file

@ -11,7 +11,6 @@ import (
"fmt"
"math/rand"
"net"
"os"
"os/exec"
"strconv"
"strings"
@ -74,7 +73,7 @@ func checkTypeTest(testStruct *Testing) int {
// value. In addition an extra margin value is added (3sec).
//
// It returns a duration either in milliseconds or in seconds.
func setDurationTimeOut(t *Testing, dArgs DynamicArgs) time.Duration {
func setDurationTimeOut(t *Testing, waitTime int) time.Duration {
if checkTypeTest(t) != externalTesting {
// Compute the number of commands + execution time (+ 3 seconds safe margin)
@ -82,7 +81,7 @@ func setDurationTimeOut(t *Testing, dArgs DynamicArgs) time.Duration {
return time.Duration(totalMs) * time.Millisecond
}
return time.Duration(dArgs.waitTime+startupSec) * time.Second
return time.Duration(waitTime+startupSec) * time.Second
}
// runCommandTester run commands and captures stdout and stderr of a the
@ -93,8 +92,8 @@ func setDurationTimeOut(t *Testing, dArgs DynamicArgs) time.Duration {
func runCommandTester(programPath, programName, command, option string,
testStruct *Testing, dArgs DynamicArgs, data *u.DynamicData) (string, string) {
timeOut := setDurationTimeOut(testStruct, dArgs)
u.PrintInfo("Duration of " + programName + " : " + timeOut.String())
timeOut := setDurationTimeOut(testStruct, dArgs.waitTime)
u.PrintInfo("Max testing duration of " + programName + " : " + timeOut.String())
ctx, cancel := context.WithTimeout(context.Background(), timeOut)
defer cancel()
@ -107,9 +106,10 @@ func runCommandTester(programPath, programName, command, option string,
cmd.Stderr = bufErr // Add io.MultiWriter(os.Stderr) to record on stderr
if checkTypeTest(testStruct) == stdinTest {
cmd.Stdin = os.Stdin
for _, cmd := range testStruct.ListCommands {
bufIn.Write([]byte(cmd))
cmd.Stdin = bufIn
for _, cmds := range testStruct.ListCommands {
time.Sleep(100 * time.Millisecond)
bufIn.Write([]byte(cmds + "\n"))
}
}
@ -141,6 +141,62 @@ func runCommandTester(programPath, programName, command, option string,
return bufOut.String(), bufErr.String()
}
// RunVerifCommandTester runs the program under test for the verification tool,
// feeds it the test commands from testStruct, and captures its output.
//
// It returns the captured stdout and stderr as strings, whether or not the
// run timed out.
func RunVerifCommandTester(programPath, programName, option string, testStruct *Testing) (string, string) {

	// Fixed waitTime of 1s; the real bound comes from the test's command list.
	timeOut := setDurationTimeOut(testStruct, 1)
	u.PrintInfo("Max testing duration of " + programName + " : " + timeOut.String())

	// The context kills the process tree once timeOut elapses.
	ctx, cancel := context.WithTimeout(context.Background(), timeOut)
	defer cancel()

	args := strings.Fields(option)
	cmd := exec.CommandContext(ctx, programPath, args...)
	// New process group — NOTE(review): presumably so u.PKill can signal the
	// whole group; confirm against PKill's implementation.
	cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}

	bufOut, bufErr, bufIn := &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}
	cmd.Stdout = bufOut // Add io.MultiWriter(os.Stdout) to record on stdout
	cmd.Stderr = bufErr // Add io.MultiWriter(os.Stderr) to record on stderr

	if checkTypeTest(testStruct) == stdinTest {
		// Pre-fill stdin with all test commands (one per line) before Start.
		cmd.Stdin = bufIn

		bufIn.Write([]byte(" \n"))
		for _, cmds := range testStruct.ListCommands {
			// Sleeps between writes; qemu targets get a longer delay.
			// NOTE(review): these delays happen before the process starts,
			// so they only pace the buffer filling — confirm intent.
			if strings.Contains(programName, "qemu-system-x86_64") {
				time.Sleep(1000 * time.Millisecond)
			} else {
				time.Sleep(100 * time.Millisecond)
			}
			bufIn.Write([]byte(cmds + "\n"))
		}
	}

	// Run the process
	if err := cmd.Start(); err != nil {
		u.PrintErr(err)
	}

	// Run a go routine to handle the tests (exec/telnet types only;
	// stdin-type tests were already queued above).
	go func() {
		if checkTypeTest(testStruct) != stdinTest {
			VerifTester(testStruct)
			// Kill the program after the tester has finished the job
			if err := u.PKill(programName, syscall.SIGINT); err != nil {
				u.PrintErr(err)
			}
		}
	}()

	// Ignore the error because the program is killed (waitTime)
	_ = cmd.Wait()

	// Both branches return the same buffers; the first one just logs the timeout.
	if ctx.Err() == context.DeadlineExceeded {
		u.PrintInfo("Time out during executing: " + cmd.String())
		return bufOut.String(), bufErr.String()
	}

	return bufOut.String(), bufErr.String()
}
// Tester runs the executable file of a given application to perform tests to
// get program dependencies.
//
@ -182,6 +238,24 @@ func Tester(programName string, cmd *exec.Cmd, data *u.DynamicData,
}
}
// VerifTester drives the tests described by testStruct against the already
// running program (started by RunVerifCommandTester).
//
// 'exec'-type tests run the external command list; 'telnet'-type tests
// connect to the address/port read from the json test file (skipped with a
// warning when either is missing). stdin-type tests never reach this
// function: the caller feeds them through the process' stdin instead.
func VerifTester(testStruct *Testing) {
	// Wait until the program has started
	time.Sleep(time.Second * startupSec)

	u.PrintInfo("Run internal tests from test file")

	// Launch execution tests
	if checkTypeTest(testStruct) == execTest {
		launchTestsExternal(testStruct)
	} else if checkTypeTest(testStruct) == telnetTest {
		if len(testStruct.AddressTelnet) == 0 || testStruct.PortTelnet == 0 {
			u.PrintWarning("Cannot find Address and port for telnet " +
				"within json file. Skip tests")
		} else {
			launchTelnetTest(testStruct)
		}
	}
}
//----------------------------------Tests---------------------------------------
// launchTestsExternal runs external tests written in the 'test.json' file.

View file

@ -0,0 +1,28 @@
// Copyright 2019 The UNICORE Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file
//
// Author: Gaulthier Gain <gaulthier.gain@uliege.be>
package extractertool
import (
"github.com/akamensky/argparse"
"os"
u "tools/srcs/common"
)
const (
library = "library"
workspaceArg = "workspace"
)
// parseLocalArguments registers the extracter tool's command-line options
// (required library name, optional workspace path) and parses os.Args.
func parseLocalArguments(p *argparse.Parser, args *u.Arguments) error {
	libOpts := &argparse.Options{Required: true, Help: "Library name"}
	args.InitArgParse(p, args, u.STRING, "l", library, libOpts)

	wsOpts := &argparse.Options{Required: false, Help: "Workspace Path"}
	args.InitArgParse(p, args, u.STRING, "u", workspaceArg, wsOpts)

	return u.ParserWrapper(p, os.Args)
}

245
srcs/extractertool/parserClang.py Executable file
View file

@ -0,0 +1,245 @@
#!/usr/bin/env python3
#---------------------------------------------------------------------
# (*) Installation:
#
# pip3 install clang
#
# cd /usr/lib/x86_64-linux-gnu/
# sudo ln -s libclang-X.Y.so.1 libclang.so (X.Y the version number)
#
# (*) Run:
#
# python3 parserClang.py <filepath> [includepathsfile]
#
# where filepath can be a repository/folder or a file (c/cpp/h/hpp)
#
#
# Gaulthier Gain <gaulthier.gain@uliege.be>
# License: BSD
#---------------------------------------------------------------------
import getopt
import os
import sys
import json
import clang.cindex
import clang
import platform
from clang.cindex import CursorKind
from collections import Counter
verbose = False # Change it to verbose mode
global_funcs = Counter()
global_calls = Counter()
silent_flag = False
# Check if a path is a directory or a file
def check_input_path(path, includePaths):
    """Analyse `path`: recurse when it is a folder, parse when it is a file.

    Exits the process with status 1 when the path is neither.
    """
    if os.path.isfile(path):
        check_type_file(path, includePaths)
        return
    if os.path.isdir(path):
        iterate_root_folder(path, includePaths)
        return
    print("Unable to analyse this file")
    exit(1)
def get_include_paths(rootdir, includepathsFile):
    """Build a space-joined string of '-isystem' options.

    Each line of `includepathsFile` is appended to `rootdir` (trailing
    newline removed) and prefixed with '-isystem '.
    """
    with open(includepathsFile, 'r') as handle:
        options = ['-isystem ' + rootdir + line.rstrip('\n') for line in handle]
    return ' '.join(options)
# Check type/extension of a given file
def check_type_file(filepath, includePaths):
    """Parse `filepath` with clang options chosen from its extension.

    .cpp/.hpp files get C++11 options, .c/.h files get C options; any
    includePaths string is appended to both. Other extensions are ignored.
    """
    cplusplusOptions = '-x c++ --std=c++11'
    cOptions = ''

    if includePaths is not None:
        cplusplusOptions += ' ' + includePaths
        cOptions += ' ' + includePaths

    if not silent_flag:
        print("Gathering symbols of " + filepath)

    if filepath.endswith(".cpp") or filepath.endswith(".hpp"):
        parse_file(filepath, cplusplusOptions)
    elif filepath.endswith(".c") or filepath.endswith(".h"):
        parse_file(filepath, cOptions)
# Iterate through a root folder
def iterate_root_folder(rootdir, includePaths):
    """Walk `rootdir` recursively and analyse every file found."""
    for subdir, _dirs, files in os.walk(rootdir):
        for name in files:
            check_type_file(subdir + os.sep + name, includePaths)
# Print info about symbols (verbose mode)
def display_info_function(funcs, calls):
    """Print each function declaration with the call sites that refer to it.

    `funcs` and `calls` are libclang cursors collected by
    find_funcs_and_calls (declarations and call expressions respectively).
    For every declaration, the locations of the calls resolving to it are
    listed, followed by a blank line.
    """
    for f in funcs:
        print(fully_qualified(f), f.location)
        for c in calls:
            if is_function_call(f, c):
                print('-', c.location)
        print()
# Parse a given file to generate an AST
def parse_file(filepath, arguments):
    """Parse `filepath` with libclang (options in `arguments`, space-separated)
    and collect its declarations/calls via find_funcs_and_calls.

    In verbose mode, also prints the symbols and the parse diagnostics.
    """
    index = clang.cindex.Index.create()
    translation_unit = index.parse(filepath, args=arguments.split())

    funcs, calls = find_funcs_and_calls(translation_unit)
    if verbose:
        display_info_function(funcs, calls)
        print(list(translation_unit.diagnostics))
# Retrieve a fully qualified function name (with namespaces)
def fully_qualified(c):
    """Return the '::'-qualified spelling of cursor `c`.

    Walks semantic parents up to the translation unit, joining each
    spelling with '::'. Returns '' for None or the translation unit itself.
    """
    if c is None or c.kind == CursorKind.TRANSLATION_UNIT:
        return ''
    prefix = fully_qualified(c.semantic_parent)
    if prefix:
        return prefix + '::' + c.spelling
    return c.spelling
# Determine whether a call-expression cursor refers to a particular
# function declaration
def is_function_call(funcdecl, c):
    """Return True when call cursor `c` resolves to the declaration `funcdecl`."""
    definition = c.get_definition()
    if definition is None:
        return False
    return definition == funcdecl
# Filter name to take only the function name (remove "(args)")
def filter_func_name(displayname):
    """Strip the '(args)' suffix from a cursor display name, if any."""
    return displayname.split('(', 1)[0]
# Retrieve lists of function declarations and call expressions in a
# translation unit
def find_funcs_and_calls(tu):
    """Collect FUNCTION_DECL and CALL_EXPR cursors from `tu`'s main file.

    Cursors located in other files (e.g. included headers) or with no
    location are skipped. Side effect: increments the module-level
    `global_funcs` / `global_calls` counters, keyed by the bare function
    name ('(args)' suffix stripped).

    Returns the (funcs, calls) cursor lists.
    """
    filename = tu.cursor.spelling
    calls = []
    funcs = []
    for c in tu.cursor.walk_preorder():
        if c.location.file is None:
            pass
        elif c.location.file.name != filename:
            pass
        elif c.kind == CursorKind.CALL_EXPR:
            calls.append(c)
            # filter name to take only the name if necessary
            funcName = filter_func_name(c.displayname)
            global_calls[funcName] += 1
        elif c.kind == CursorKind.FUNCTION_DECL:
            funcs.append(c)
            # filter name to take only the name if necessary
            funcName = filter_func_name(c.displayname)
            global_funcs[funcName] += 1
    return funcs, calls
# Serialise data as pretty-printed JSON into <output_filename>.json.
def write_to_json(output_filename, data):
    with open('{}.json'.format(output_filename), 'w') as handle:
        json.dump(data, handle, indent=4, sort_keys=True)
# Load and return the JSON content of <filename>.json.
def read_from_json(filename):
    # Bug fix: the previous version referenced the undefined global
    # 'output_filename' instead of the 'filename' parameter, which raised
    # NameError on every call.
    with open(filename + '.json', 'r') as fp:
        data = json.load(fp)
    return data
# Return the set of syscall names listed one per line in the given text
# file (lines are stripped of surrounding whitespace).
def read_syscalls_list(filename):
    with open(filename) as handle:
        return {line.strip() for line in handle}
# Return the gathered function calls that are NOT part of the known
# syscalls set.
def compare_syscalls(syscalls):
    if silent_flag is False:
        print("Gathered syscalls from function calls:")
    return [name for name in global_calls.keys() if name not in syscalls]
# Main function: parse command-line options, analyse every input file and
# emit the gathered symbols as text or JSON.
def main():
    global silent_flag, verbose
    # Bug fix: 'i:' was missing from the getopt string, so the -i option
    # (include-paths file) could never be parsed.
    optlist, args = getopt.getopt(sys.argv[1:], "i:o:qvt")
    includepathsFile = None
    output_file_name = None
    textFormat = False
    for opt in optlist:
        if opt[0] == "-i":
            # Bug fix: was assigned to the misspelled 'includepathFile'
            # and therefore silently ignored.
            includepathsFile = opt[1]
        if opt[0] == "-o":
            output_file_name = opt[1]
        if opt[0] == "-q":
            silent_flag = True
        if opt[0] == "-v":
            verbose = True
        if opt[0] == "-t":
            textFormat = True

    input_file_names = args
    if len(input_file_names) == 0:
        if silent_flag is False:
            print("No input files supplied")
        exit(1)

    for input_file_name in input_file_names:
        includePaths = None
        if includepathsFile is not None:
            # Bug fix: the include paths were previously computed with
            # 'input_file_name' before it was defined (NameError); resolve
            # them per input file instead.
            includePaths = get_include_paths(input_file_name, includepathsFile)
        check_input_path(input_file_name, includePaths)

    if silent_flag is False:
        print("---------------------------------------------------------")

    if textFormat:
        # Plain text output: comma-separated list of gathered functions.
        if global_funcs:
            print(','.join(global_funcs))
    else:
        # Dump function declarations and calls to json
        output_dikt = {
            'functions': [{'name': k, 'value': v}
                          for k, v in global_funcs.items()],
            'calls': [{'name': k, 'value': v}
                      for k, v in global_calls.items()],
        }
        if includepathsFile is not None:
            # Read syscalls from txt file and keep the calls that are not
            # known syscalls.
            all_syscalls = read_syscalls_list('syscall_list.txt')
            output_dikt['syscalls'] = compare_syscalls(all_syscalls)
        if output_file_name is not None:
            # Honour -o (it was previously parsed but never used).
            with open(output_file_name, 'w') as output_file:
                json.dump(output_dikt, output_file)
        else:
            json.dump(output_dikt, sys.stdout)
# Entry point. On macOS the libclang shared library is not on the default
# search path, so point clang.cindex at the Xcode-bundled dylib before
# any parsing happens.
if __name__== "__main__":
    if platform.system() == "Darwin":
        clang.cindex.Config.set_library_file("/Applications/Xcode.app/Contents/Frameworks/libclang.dylib")
    main()

View file

@ -0,0 +1,341 @@
package extractertool
import (
"errors"
"io"
"io/ioutil"
"net/http"
"os"
"os/exec"
"path/filepath"
"regexp"
"strconv"
"strings"
u "tools/srcs/common"
)
import "C"
// HTTP is the scheme prefix used to recognise a download location in a
// Makefile variable value.
const HTTP = "http"

// URL is the substring looked for in Makefile variable names that hold
// the address of the library source archive.
const URL = "URL"

// MicroLibFile is the JSON document written for a library: the list of
// functions (symbols) extracted from its sources.
type MicroLibFile struct {
	Functions []MicroLibsFunction `json:"functions"`
}

// MicroLibsFunction is a single extracted symbol name.
type MicroLibsFunction struct {
	Name string `json:"name"`
}

// Variables represents a Makefile variable: its name and the raw
// "NAME=value" line that defines it (later resolved in place).
type Variables struct {
	name  string
	value string
}
// getMakefileSources scans a Makefile's content and records every C/C++
// source or header path it references. mapSources is keyed by the bare
// file name; the value is the matched (partial) path.
func getMakefileSources(content string, mapSources map[string]string) {
	sourceRe := regexp.MustCompile(`(?m)\/.*\.c|\/.*\.h|\/.*\.cpp`)
	for _, path := range sourceRe.FindAllString(content, -1) {
		parts := strings.Split(path, "/")
		mapSources[parts[len(parts)-1]] = path
	}
}
// findVariables collects every $(NAME) reference found in content and,
// for each one seen for the first time, records the "NAME=..." line that
// defines it (the value stays empty when no definition exists).
func findVariables(content string, mapVariables map[string]*Variables) {
	refRe := regexp.MustCompile(`(?m)\$\([A-Z0-9_\-]*\)`)
	for _, ref := range refRe.FindAllString(content, -1) {
		if _, seen := mapVariables[ref]; seen {
			continue
		}
		// Strip the "$(" prefix and ")" suffix to get the bare name.
		variable := &Variables{name: ref[2 : len(ref)-1], value: ""}
		defRe := regexp.MustCompile("(?m)" + variable.name + "=.*$")
		if def := defRe.FindString(content); def != "" {
			variable.value = def
		}
		mapVariables[ref] = variable
	}
}
// resolveVariables performs one substitution pass over every recorded
// Makefile variable: each $(NAME) reference inside a value is replaced by
// the right-hand side of NAME's own definition, and values without any
// reference are reduced to their right-hand side.
//
// NOTE(review): mapVariables[match] is dereferenced without a presence
// check — a $(NAME) referencing a variable findVariables never recorded
// would panic; presumably findVariables has always seen it first. Confirm.
func resolveVariables(mapVariables map[string]*Variables) {
	for _, value := range mapVariables {
		var re = regexp.MustCompile(`(?m)\$\([A-Z0-9_\-]*\)`)
		resolved := false
		varString := ""
		// Replace each $(NAME) reference by the value (text after '=')
		// of the referenced variable's definition line.
		for _, match := range re.FindAllString(value.value, -1) {
			vars := strings.Split(mapVariables[match].value, "=")
			if len(vars) > 1 {
				varString = vars[1]
			} else {
				varString = mapVariables[match].value
			}
			value.value = strings.Replace(value.value, match, varString, -1)
			resolved = true
		}
		// No reference found: strip the "NAME=" prefix so only the raw
		// right-hand side remains (empty when there was no '=').
		if !resolved {
			vars := strings.Split(value.value, "=")
			if len(vars) > 1 {
				varString = vars[1]
			}
			value.value = varString
		}
	}
}
// detectURL returns a pointer to the first variable value that looks like
// a download location: its key contains "URL" and its value contains
// "http". It returns nil when no such variable exists.
func detectURL(mapVariables map[string]*Variables) *string {
	for name, variable := range mapVariables {
		if !strings.Contains(name, URL) || !strings.Contains(variable.value, HTTP) {
			continue
		}
		// Prefer the text after '=' when the value is still a raw
		// definition line.
		if fields := strings.Split(variable.value, "="); len(fields) > 1 {
			return &fields[1]
		}
		return &variable.value
	}
	return nil
}
// TODO: replace with the shared implementation in tools/srcs/common.
// CreateFolder creates the folder at path if it does not exist yet.
//
// It returns true when the folder was actually created, false when it
// already existed, and an error for any other failure.
func CreateFolder(path string) (bool, error) {
	// Attempt the creation directly instead of the previous Stat-then-Mkdir
	// sequence: this removes the race window in which another process could
	// create the folder between the two calls, and it no longer swallows
	// non-NotExist Stat errors (e.g. permission denied).
	if err := os.Mkdir(path, 0755); err != nil {
		if os.IsExist(err) {
			return false, nil
		}
		return false, err
	}
	return true, nil
}
// DownloadFile downloads the content at url and writes it to filepath.
//
// It returns an error if any, otherwise it returns nil.
func DownloadFile(filepath string, url string) error {
	// Get the data
	resp, err := http.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	// Bug fix: a transport-level success can still be an HTTP error
	// (404, 500, ...). Fail here instead of silently saving the server's
	// error page as the archive.
	if resp.StatusCode != http.StatusOK {
		return errors.New("unexpected status " + resp.Status + " while downloading " + url)
	}

	// Create the file
	out, err := os.Create(filepath)
	if err != nil {
		return err
	}
	defer out.Close()

	// Write the body to file
	_, err = io.Copy(out, resp.Body)
	return err
}
func findSourcesFiles(workspace string) ([]string, error) {
var filenames []string
err := filepath.Walk(workspace,
func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
ext := filepath.Ext(info.Name())
if ext == ".c" || ext == ".cpp" {
filenames = append(filenames, path)
}
return nil
})
if err != nil {
return nil, err
}
return filenames, nil
}
// TODO: replace with the shared implementation in tools/srcs/common.
// ExecuteCommand runs a single command without displaying its output.
//
// It returns the combined stdout/stderr text and an error if any,
// otherwise it returns nil.
func ExecuteCommand(command string, arguments []string) (string, error) {
	cmd := exec.Command(command, arguments...)
	output, err := cmd.CombinedOutput()
	if err != nil {
		return "", err
	}
	return string(output), nil
}
// saveSymbols parses the comma-separated symbol list produced by the
// Clang parser script and records each new symbol under libName.
// "main" and anything containing "test" are skipped with a warning.
func saveSymbols(output string, mapSymbols map[string]string, libName string) {
	output = strings.TrimSuffix(output, "\n")
	for _, symbol := range strings.Split(output, ",") {
		if len(symbol) == 0 {
			continue
		}
		if _, known := mapSymbols[symbol]; known {
			continue
		}
		if symbol == "main" || strings.Contains(symbol, "test") {
			u.PrintWarning("Ignore function: " + symbol)
			continue
		}
		mapSymbols[symbol] = libName
	}
}
// extractPrototype runs the parserClang.py helper on each filtered source
// file and accumulates the symbols it reports into mapSymbols under
// libName. Files that fail to parse are skipped with a warning.
func extractPrototype(sourcesFiltered []string, mapSymbols map[string]string, libName string) error {
	// The helper script location is loop-invariant, compute it once.
	script := filepath.Join(os.Getenv("GOPATH"), "src", "tools", "srcs",
		"extractertool", "parserClang.py")
	for _, source := range sourcesFiltered {
		output, err := ExecuteCommand("python3", []string{script, "-q", "-t", source})
		if err != nil {
			// Best effort: keep going with the remaining files.
			u.PrintWarning("Incomplete analysis with file " + source)
			continue
		}
		saveSymbols(output, mapSymbols, libName)
	}
	return nil
}
// filterSourcesFiles keeps only the files whose base name appears in
// mapSources, i.e. the sources actually referenced by the Makefile.
func filterSourcesFiles(files []string, mapSources map[string]string) []string {
	var sourcesFiltered []string
	for _, file := range files {
		// Bug fix: the previous strings.Split(f, "/") approach skipped
		// bare file names (no separator) and was not OS-aware;
		// filepath.Base handles both cases.
		filename := filepath.Base(file)
		if _, ok := mapSources[filename]; ok {
			sourcesFiltered = append(sourcesFiltered, file)
		}
	}
	return sourcesFiltered
}
// RunExtracterTool runs the extracter tool: it locates a micro-library's
// Makefile.uk, downloads the source archive referenced there, extracts it,
// analyses the sources the Makefile references, and writes the extracted
// symbols as JSON under $GOPATH/src/tools/libs/external/<lib>.json.
func RunExtracterTool(homeDir string) {
	// Init and parse local arguments
	args := new(u.Arguments)
	p, err := args.InitArguments("--extracter",
		"The extracter tool allows to extract all the symbols (functions) of an external/internal library")
	if err != nil {
		u.PrintErr(err)
	}
	if err := parseLocalArguments(p, args); err != nil {
		u.PrintErr(err)
	}

	// Default workspace is $HOME/<workspace folder>, overridable by flag.
	var workspacePath = homeDir + u.SEP + u.WORKSPACEFOLDER
	if len(*args.StringArg[workspaceArg]) > 0 {
		workspacePath = *args.StringArg[workspaceArg]
	}

	// The library argument is either an absolute path or a name relative
	// to the workspace libs folder.
	libpath := *args.StringArg[library]
	lib := libpath
	if filepath.IsAbs(libpath) {
		lib = filepath.Base(libpath)
	} else {
		libpath = filepath.Join(workspacePath, u.LIBSFOLDER, lib)
	}

	file, err := ioutil.ReadFile(filepath.Join(libpath, "Makefile.uk"))
	if err != nil {
		u.PrintErr(err)
	}

	// Parse the Makefile: referenced sources, $(VAR) definitions, and
	// (after resolution) the URL of the source archive.
	mapVariables := make(map[string]*Variables)
	content := string(file)
	mapSources := make(map[string]string)
	getMakefileSources(content, mapSources)
	findVariables(content, mapVariables)
	resolveVariables(mapVariables)
	url := detectURL(mapVariables)
	if url == nil {
		u.PrintErr(errors.New("url of the lib not found"))
		return
	}

	fileExtension := filepath.Ext(*url)
	folderName := lib + "_sources_folder"
	created, err := CreateFolder(folderName)
	if err != nil {
		u.PrintErr(err)
	}

	var files []string
	archiveName := lib + "_sources" + fileExtension
	// Only download and extract when the destination folder did not exist
	// yet; otherwise the previously extracted sources are reused below.
	if created {
		u.PrintInfo(*url + " is found. Download the lib sources...")
		err := DownloadFile(archiveName, *url)
		if err != nil {
			u.PrintErr(err)
		}
		u.PrintOk(*url + " successfully downloaded.")
		u.PrintInfo("Extracting " + archiveName + "...")
		if fileExtension == ".zip" {
			files, err = Unzip(archiveName, folderName)
			if err != nil {
				// Clean up the partial download before aborting.
				_ = os.Remove(archiveName)
				_ = os.RemoveAll(folderName)
				u.PrintErr(err.Error() + ". Corrupted archive. Please try again.")
			}
		} else if fileExtension == ".tar" || fileExtension == ".gz" || fileExtension == ".tgz" {
			files, err = unTarGz(archiveName, folderName)
			if err != nil {
				// Clean up the partial download before aborting.
				_ = os.Remove(archiveName)
				_ = os.RemoveAll(folderName)
				u.PrintErr(err.Error() + ". Corrupted archive. Please try again.")
			}
		} else {
			u.PrintErr(errors.New("unknown extension for archive"))
		}
	}

	// Folder already existed (or the archive listing was empty): walk it
	// to find the source files directly.
	if len(files) == 0 {
		u.PrintInfo("Inspecting folder " + folderName + " for sources...")
		files, err = findSourcesFiles(folderName)
		if err != nil {
			u.PrintErr(err)
		}
	}

	// Keep only the sources the Makefile references, then extract their
	// symbols via the external Clang parser script.
	sourcesFiltered := filterSourcesFiles(files, mapSources)
	u.PrintInfo("Find " + strconv.Itoa(len(sourcesFiltered)) + " files to analyse")
	mapSymbols := make(map[string]string)
	u.PrintInfo("Extracting symbols from all sources of " + lib + ". This may take some times...")
	if err := extractPrototype(sourcesFiltered, mapSymbols, lib); err != nil {
		u.PrintErr(err)
	}

	// Serialise the symbol names into the per-library JSON database.
	mf := MicroLibFile{}
	mf.Functions = make([]MicroLibsFunction, len(mapSymbols))
	i := 0
	for k, _ := range mapSymbols {
		mf.Functions[i].Name = k
		i++
	}
	u.PrintOk(strconv.Itoa(len(mapSymbols)) + " symbols from " + lib + " have been extracted.")
	filename := filepath.Join(os.Getenv("GOPATH"), "src", "tools", "libs", "external", lib)
	if err := u.RecordDataJson(filename, mf); err != nil {
		u.PrintErr(err)
	} else {
		u.PrintOk("Symbols file have been written to " + filename + ".json")
	}

	// Remove the downloaded archive and the extracted sources.
	u.PrintInfo("Remove folders " + archiveName + " and " + folderName)
	_ = os.Remove(archiveName)
	_ = os.RemoveAll(folderName)
}

View file

@ -0,0 +1,128 @@
package extractertool
import (
"archive/tar"
"archive/zip"
"compress/gzip"
"fmt"
"io"
"os"
"path/filepath"
"strings"
)
func unTarGz(src string, dest string) ([]string, error) {
var filenames []string
gzipStream, err := os.Open(src)
if err != nil {
return nil, err
}
uncompressedStream, err := gzip.NewReader(gzipStream)
if err != nil {
return nil, err
}
tarReader := tar.NewReader(uncompressedStream)
for true {
header, err := tarReader.Next()
if err == io.EOF {
break
}
if err != nil {
return nil, err
}
switch header.Typeflag {
case tar.TypeDir:
if err := os.Mkdir(filepath.Join(dest, header.Name), 0755); err != nil {
println(err.Error())
return nil, err
}
case tar.TypeReg:
outFile, err := os.Create(filepath.Join(dest, header.Name))
if err != nil {
println(err.Error())
return nil, err
}
if _, err := io.Copy(outFile, tarReader); err != nil {
println(err.Error())
return nil, err
}
filenames = append(filenames, outFile.Name())
outFile.Close()
default:
continue
}
}
return filenames, nil
}
func Unzip(src string, dest string) ([]string, error) {
var filenames []string
r, err := zip.OpenReader(src)
if err != nil {
return filenames, err
}
defer r.Close()
for _, f := range r.File {
// Store filename/path for returning and using later on
fpath := filepath.Join(dest, f.Name)
// Check for ZipSlip. More Info: http://bit.ly/2MsjAWE
if !strings.HasPrefix(fpath, filepath.Clean(dest)+string(os.PathSeparator)) {
return nil, fmt.Errorf("%s: illegal file path", fpath)
}
filenames = append(filenames, fpath)
if f.FileInfo().IsDir() {
// Make Folder
err := os.MkdirAll(fpath, os.ModePerm)
if err != nil {
return nil, err
}
continue
}
// Make File
if err = os.MkdirAll(filepath.Dir(fpath), os.ModePerm); err != nil {
return nil, err
}
outFile, err := os.OpenFile(fpath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())
if err != nil {
return nil, err
}
rc, err := f.Open()
if err != nil {
return nil, err
}
_, err = io.Copy(outFile, rc)
// Close the file without defer to close before next iteration of loop
outFile.Close()
rc.Close()
if err != nil {
return nil, err
}
}
return filenames, nil
}

View file

@ -7,12 +7,15 @@
package main
import (
"os"
"os/user"
"tools/srcs/alignertool"
"tools/srcs/binarytool"
"tools/srcs/buildtool"
u "tools/srcs/common"
"tools/srcs/crawlertool"
"tools/srcs/dependtool"
"tools/srcs/extractertool"
"tools/srcs/veriftool"
)
@ -20,7 +23,8 @@ func main() {
// Init global arguments
args := new(u.Arguments)
parser, err := args.InitArguments()
parser, err := args.InitArguments("The UNICORE Toolchain",
"Toolkit with provides several tools to analyse, compare and build unikernels")
if err != nil {
u.PrintErr(err)
}
@ -57,26 +61,39 @@ func main() {
return
}
if *args.BoolArg[u.ALIGNER] {
u.PrintHeader1("(*) RUN ALIGNER TOOL")
alignertool.RunAligner(usr.HomeDir)
return
}
if *args.BoolArg[u.EXTRACTER] {
u.PrintHeader1("(*) RUN EXTRACTER TOOL")
extractertool.RunExtracterTool(usr.HomeDir)
return
}
if all || *args.BoolArg[u.DEP] {
// Initialize data
data = new(u.Data)
u.PrintHeader1("(1) RUN DEPENDENCIES ANALYSER")
u.PrintHeader1("(*) RUN DEPENDENCIES ANALYSER")
dependtool.RunAnalyserTool(usr.HomeDir, data)
}
if all || *args.BoolArg[u.BUILD] {
u.PrintHeader1("(2) AUTOMATIC BUILD TOOL")
u.PrintHeader1("(*) SEMI-AUTOMATIC BUILD TOOL")
buildtool.RunBuildTool(usr.HomeDir, data)
}
if all || *args.BoolArg[u.VERIF] {
u.PrintHeader1("(3) VERIFICATION TOOL")
veriftool.RunVerificationTool()
u.PrintHeader1("(*) OUTPUT VERIFICATION TOOL")
veriftool.RunVerificationTool(usr.HomeDir)
}
if all || *args.BoolArg[u.PERF] {
u.PrintHeader1("(4) PERFORMANCE OPTIMIZATION TOOL")
u.PrintHeader1("(*) PERFORMANCE OPTIMIZATION TOOL (see way-finder)")
os.Exit(1)
}
}

View file

@ -17,7 +17,7 @@ const (
testFileArg = "testFile"
configFileArg = "configFile"
optionsArg = "options"
unikraftArg = "unikraft"
workspaceArg = "workspace"
)
// ParseArguments parses arguments of the application.
@ -35,8 +35,8 @@ func parseLocalArguments(p *argparse.Parser, args *u.Arguments) error {
&argparse.Options{Required: false, Default: "", Help: "Extra options for " +
"launching program"})
args.InitArgParse(p, args, u.STRING, "u", unikraftArg,
&argparse.Options{Required: false, Help: "Unikraft Path"})
args.InitArgParse(p, args, u.STRING, "u", workspaceArg,
&argparse.Options{Required: false, Help: "Workspace Path"})
return u.ParserWrapper(p, os.Args)
}

View file

@ -7,24 +7,25 @@
package veriftool
import (
"bufio"
"errors"
"fmt"
"github.com/sergi/go-diff/diffmatchpatch"
"io/ioutil"
"log"
"os"
"path/filepath"
"strings"
u "tools/srcs/common"
"tools/srcs/dependtool"
)
const stdinCmd = "[STDIN]"
const testCmd = "[TEST]"
func RunVerificationTool() {
func RunVerificationTool(homeDir string) {
// Init and parse local arguments
args := new(u.Arguments)
p, err := args.InitArguments()
p, err := args.InitArguments("--verif",
"The Output verifier tool allows to compare the output of a program ported as unikernel")
if err != nil {
u.PrintErr(err)
}
@ -32,6 +33,12 @@ func RunVerificationTool() {
u.PrintErr(err)
}
// Get program path
programPath, err := u.GetProgramPath(&*args.StringArg[programArg])
if err != nil {
u.PrintErr("Could not determine program path", err)
}
// Get program Name
programName := *args.StringArg[programArg]
@ -40,25 +47,24 @@ func RunVerificationTool() {
programName = filepath.Base(programName)
}
unikraftPath := *args.StringArg[unikraftArg]
if len(unikraftPath) == 0 {
u.PrintErr("Unikraft folder must exist! Run the build tool before " +
"using the verification tool")
var workspacePath = homeDir + u.SEP + u.WORKSPACEFOLDER
if len(*args.StringArg[workspaceArg]) > 0 {
workspacePath = *args.StringArg[workspaceArg]
}
// Get the app folder
var appFolder string
if unikraftPath[len(unikraftPath)-1] != os.PathSeparator {
appFolder = unikraftPath + u.SEP + u.APPSFOLDER + programName + u.SEP
if workspacePath[len(workspacePath)-1] != os.PathSeparator {
appFolder = workspacePath + u.SEP + u.APPSFOLDER + programName + u.SEP
} else {
appFolder = unikraftPath + u.APPSFOLDER + programName + u.SEP
appFolder = workspacePath + u.APPSFOLDER + programName + u.SEP
}
// Get the build folder
buildAppFolder := appFolder + u.BUILDFOLDER
// Get KVM image
var kvmUnikernel string
var kvmUnikernelPath, kvmUnikernel string
if file, err := u.OSReadDir(buildAppFolder); err != nil {
u.PrintWarning(err)
} else {
@ -66,6 +72,7 @@ func RunVerificationTool() {
if !f.IsDir() && strings.Contains(f.Name(), u.KVM_IMAGE) &&
len(filepath.Ext(f.Name())) == 0 {
kvmUnikernel = f.Name()
kvmUnikernelPath = filepath.Join(buildAppFolder, f.Name())
}
}
}
@ -75,44 +82,73 @@ func RunVerificationTool() {
u.PrintWarning(errors.New("no KVM image found"))
}
// Filepath of output
unikernelFilename := appFolder + "output_" + kvmUnikernel + ".txt"
appFilename := appFolder + "output_" + programName + ".txt"
// Read test
argStdin := ""
if len(*args.StringArg[testFileArg]) > 0 {
var err error
var cmdTests []string
cmdTests, err = u.ReadLinesFile(*args.StringArg[testFileArg])
if err != nil {
u.PrintWarning("Cannot find test files" + err.Error())
testingStruct := &dependtool.Testing{}
if len(*args.StringArg[testFileArg]) > 0 {
var err error
testingStruct, err = dependtool.ReadTestFileJson(*args.StringArg[testFileArg])
if err != nil {
u.PrintWarning("Cannot find test file: " + err.Error())
}
}
if strings.Contains(cmdTests[0], stdinCmd) {
argStdin = strings.Join(cmdTests[1:], "")
argStdin += "\n"
} else if strings.Contains(cmdTests[0], testCmd) {
//todo add for other tests
option := ""
if len(*args.StringArg[optionsArg]) > 0 {
option = *args.StringArg[optionsArg]
}
str_b := ""
for i := 0; i < 45; i++ {
str_b += "\n"
}
outStr, _ := dependtool.RunVerifCommandTester(programPath, programName, option, testingStruct)
if err := u.WriteToFile(appFilename, []byte(str_b+outStr)); err != nil {
u.PrintWarning("Impossible to write the output of verification to " +
appFilename)
} else {
u.PrintInfo("Output of general application written to " + appFilename)
}
option = "-nographic -vga none -device isa-debug-exit -kernel " + kvmUnikernelPath
outStr, _ = dependtool.RunVerifCommandTester("qemu-system-x86_64", "qemu-system-x86_64", option, testingStruct)
if err := u.WriteToFile(unikernelFilename, []byte(str_b+outStr)); err != nil {
u.PrintWarning("Impossible to write the output of verification to " +
kvmUnikernel)
} else {
u.PrintInfo("Output of unikernel written to " + unikernelFilename)
}
} else {
// No test file
if err := testUnikernel(buildAppFolder+kvmUnikernel, unikernelFilename,
[]byte(argStdin)); err != nil {
u.PrintWarning("Impossible to write the output of verification to " +
unikernelFilename)
}
// Test general app
if err := testApp(programName, appFilename, []byte(argStdin)); err != nil {
u.PrintWarning("Impossible to write the output of verification to " +
appFilename)
} else {
u.PrintInfo("Output of general application writtent to " + appFilename)
}
}
// Test KVM app unikernel
unikernelFilename := appFolder + "output_" + kvmUnikernel + ".txt"
if err := testUnikernel(buildAppFolder+kvmUnikernel, unikernelFilename,
[]byte(argStdin)); err != nil {
u.PrintWarning("Impossible to write the output of verification to " +
unikernelFilename)
c := askForConfirmation("Do you want to see a diff between the two output")
if c {
u.PrintInfo("Comparison output:")
// Compare both output
fmt.Println(compareOutput(unikernelFilename, appFilename))
}
// Test general app
appFilename := appFolder + "output_" + programName + ".txt"
if err := testApp(programName, appFilename, []byte(argStdin)); err != nil {
u.PrintWarning("Impossible to write the output of verification to " +
unikernelFilename)
}
u.PrintInfo("Comparison output:")
// Compare both output
fmt.Println(compareOutput(unikernelFilename, appFilename))
}
func compareOutput(unikernelFilename, appFilename string) string {
@ -147,3 +183,28 @@ func testUnikernel(kvmUnikernel, outputFile string, argsStdin []byte) error {
return u.WriteToFile(outputFile, bOut)
}
// askForConfirmation prompts the user with s and waits for a yes/no
// answer on stdin. Matching is case-insensitive ("y", "Y", "yes", "YES",
// "Yes" all count). Unrecognised input re-prompts; the function only
// returns once a valid answer is read.
func askForConfirmation(s string) bool {
	reader := bufio.NewReader(os.Stdin)
	for {
		fmt.Printf("%s [y/n]: ", s)
		response, err := reader.ReadString('\n')
		if err != nil {
			log.Fatal(err)
		}
		switch strings.ToLower(strings.TrimSpace(response)) {
		case "y", "yes":
			return true
		case "n", "no":
			return false
		}
	}
}

View file

@ -0,0 +1,41 @@
{
"typeTest": "stdin",
"timeMsCommand": 1000,
"listCommands": [
"create table tbl1(one varchar(10), two smallint);",
"insert into tbl1 values('hello!',10);",
"insert into tbl1 values('goodbye', 20);",
"select * from tbl1;",
".mode list",
"select * from tbl1;",
"select * from tbl1;",
".mode quote",
"select * from tbl1;",
".mode line",
"select * from tbl1;",
".mode column",
"select * from tbl1;",
".width 12 6",
"select * from tbl1;",
".header off",
"select * from tbl1;",
".mode insert new_table",
"select * from tbl1;",
".mode list",
"select * from tbl1;",
"SELECT * FROM tbl1;",
"SELECT * FROM tbl1;",
".help",
"CREATE TABLE selftest(tno INTEGER PRIMARY KEY, op TEXT, cmd TEXT, ans TEXT);",
"CREATE TABLE tbl2 (f1 varchar(30) primary key,f2 text,f3 real);",
"insert into tbl2 values(10, 'hello!',10);",
"insert into tbl2 values(20, 'hello!',20);",
"UPDATE tbl2 SET f2='salut' WHERE f2='hello!';",
"select * from tbl1;",
"select * from tbl2;",
"DELETE FROM tbl2 WHERE f2='salut';",
"DELETE FROM tbl2 WHERE f2='hello';",
"DROP TABLE tbl2;",
".quit"
]
}