isula-build: support save/load separated image
reason: 1. support save/load separated image
        2. add related test cases and bug fixes
Signed-off-by: DCCooper <1866858@gmail.com>
parent: ab055f3f95
commit: 3d943142b3
@@ -1 +1 @@
-0.9.5-14
+0.9.5-15

@@ -1 +1 @@
-530b4f0737b75f72ff4dfc8ab15494a6064b1a73
+42f0c2f0f09f978ca9282ae085e68fa5446f231b

@@ -2,7 +2,7 @@
 Name: isula-build
 Version: 0.9.5
-Release: 14
+Release: 15
 Summary: A tool to build container images
 License: Mulan PSL V2
 URL: https://gitee.com/openeuler/isula-build
@@ -85,6 +85,12 @@ fi
 /usr/share/bash-completion/completions/isula-build

 %changelog
+* Tue Nov 02 2021 lixiang <lixiang172@huawei.com> - 0.9.5-15
+- Type:requirement
+- CVE:NA
+- SUG:restart
+- DESC:support save/load separated image, add related test cases and bug fixes
+
 * Mon Oct 25 2021 DCCooper <1866858@gmail.com> - 0.9.5-14
 - Type:enhancement
 - CVE:NA
patch/0072-protocol-define-separator-protocol.patch (new file, 754 lines)
@@ -0,0 +1,754 @@
From 4de32e443640b4b4481c619aeb2571d1872f9008 Mon Sep 17 00:00:00 2001
From: DCCooper <1866858@gmail.com>
Date: Tue, 26 Oct 2021 14:18:34 +0800
Subject: [PATCH 01/16] protocol: define separator protocol

reason: define the separator protocol
save: add SeparatorSave (base, lib, rename, dest)
load: add LoadID and SeparatorLoad (app, dir, base, lib, skipCheck)

Signed-off-by: DCCooper <1866858@gmail.com>
---
 api/services/control.pb.go | 491 ++++++++++++++++++++++++++-----------
 api/services/control.proto |  36 +++
 2 files changed, 381 insertions(+), 146 deletions(-)

diff --git a/api/services/control.pb.go b/api/services/control.pb.go
index 0c8b6394..4f386671 100644
--- a/api/services/control.pb.go
+++ b/api/services/control.pb.go
@@ -1054,10 +1054,16 @@ func (m *LogoutResponse) GetResult() string {

type LoadRequest struct {
|
||||
// path is the path of loading file
|
||||
- Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
|
||||
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
- XXX_unrecognized []byte `json:"-"`
|
||||
- XXX_sizecache int32 `json:"-"`
|
||||
+ Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
|
||||
+ // loadID is the unique ID for each time load
|
||||
+ // also is the part of construct temporary path to
|
||||
+ // store transport file
|
||||
+ LoadID string `protobuf:"bytes,2,opt,name=loadID,proto3" json:"loadID,omitempty"`
|
||||
+ // SeparatorLoad is the info to load separated image
|
||||
+ Sep *SeparatorLoad `protobuf:"bytes,3,opt,name=sep,proto3" json:"sep,omitempty"`
|
||||
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
+ XXX_unrecognized []byte `json:"-"`
|
||||
+ XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *LoadRequest) Reset() { *m = LoadRequest{} }
|
||||
@@ -1091,6 +1097,104 @@ func (m *LoadRequest) GetPath() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
+func (m *LoadRequest) GetLoadID() string {
|
||||
+ if m != nil {
|
||||
+ return m.LoadID
|
||||
+ }
|
||||
+ return ""
|
||||
+}
|
||||
+
|
||||
+func (m *LoadRequest) GetSep() *SeparatorLoad {
|
||||
+ if m != nil {
|
||||
+ return m.Sep
|
||||
+ }
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+type SeparatorLoad struct {
|
||||
+ // app is application image name
|
||||
+ App string `protobuf:"bytes,1,opt,name=app,proto3" json:"app,omitempty"`
|
||||
+ // dir is image tarballs directory
|
||||
+ Dir string `protobuf:"bytes,2,opt,name=dir,proto3" json:"dir,omitempty"`
|
||||
+ // base is base image tarball path
|
||||
+ Base string `protobuf:"bytes,3,opt,name=base,proto3" json:"base,omitempty"`
|
||||
+ // lib is library image tarball path
|
||||
+ Lib string `protobuf:"bytes,4,opt,name=lib,proto3" json:"lib,omitempty"`
|
||||
+ // skipCheck is flag to skip sha256 check sum for images
|
||||
+ SkipCheck bool `protobuf:"varint,5,opt,name=skipCheck,proto3" json:"skipCheck,omitempty"`
|
||||
+ // enabled is flag to indicate the separator function enabled or not
|
||||
+ Enabled bool `protobuf:"varint,6,opt,name=enabled,proto3" json:"enabled,omitempty"`
|
||||
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
+ XXX_unrecognized []byte `json:"-"`
|
||||
+ XXX_sizecache int32 `json:"-"`
|
||||
+}
|
||||
+
|
||||
+func (m *SeparatorLoad) Reset() { *m = SeparatorLoad{} }
|
||||
+func (m *SeparatorLoad) String() string { return proto.CompactTextString(m) }
|
||||
+func (*SeparatorLoad) ProtoMessage() {}
|
||||
+func (*SeparatorLoad) Descriptor() ([]byte, []int) {
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{19}
|
||||
+}
|
||||
+func (m *SeparatorLoad) XXX_Unmarshal(b []byte) error {
|
||||
+ return xxx_messageInfo_SeparatorLoad.Unmarshal(m, b)
|
||||
+}
|
||||
+func (m *SeparatorLoad) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
+ return xxx_messageInfo_SeparatorLoad.Marshal(b, m, deterministic)
|
||||
+}
|
||||
+func (m *SeparatorLoad) XXX_Merge(src proto.Message) {
|
||||
+ xxx_messageInfo_SeparatorLoad.Merge(m, src)
|
||||
+}
|
||||
+func (m *SeparatorLoad) XXX_Size() int {
|
||||
+ return xxx_messageInfo_SeparatorLoad.Size(m)
|
||||
+}
|
||||
+func (m *SeparatorLoad) XXX_DiscardUnknown() {
|
||||
+ xxx_messageInfo_SeparatorLoad.DiscardUnknown(m)
|
||||
+}
|
||||
+
|
||||
+var xxx_messageInfo_SeparatorLoad proto.InternalMessageInfo
|
||||
+
|
||||
+func (m *SeparatorLoad) GetApp() string {
|
||||
+ if m != nil {
|
||||
+ return m.App
|
||||
+ }
|
||||
+ return ""
|
||||
+}
|
||||
+
|
||||
+func (m *SeparatorLoad) GetDir() string {
|
||||
+ if m != nil {
|
||||
+ return m.Dir
|
||||
+ }
|
||||
+ return ""
|
||||
+}
|
||||
+
|
||||
+func (m *SeparatorLoad) GetBase() string {
|
||||
+ if m != nil {
|
||||
+ return m.Base
|
||||
+ }
|
||||
+ return ""
|
||||
+}
|
||||
+
|
||||
+func (m *SeparatorLoad) GetLib() string {
|
||||
+ if m != nil {
|
||||
+ return m.Lib
|
||||
+ }
|
||||
+ return ""
|
||||
+}
|
||||
+
|
||||
+func (m *SeparatorLoad) GetSkipCheck() bool {
|
||||
+ if m != nil {
|
||||
+ return m.SkipCheck
|
||||
+ }
|
||||
+ return false
|
||||
+}
|
||||
+
|
||||
+func (m *SeparatorLoad) GetEnabled() bool {
|
||||
+ if m != nil {
|
||||
+ return m.Enabled
|
||||
+ }
|
||||
+ return false
|
||||
+}
|
||||
+
|
||||
type LoadResponse struct {
|
||||
// log is the log sent to client
|
||||
Log string `protobuf:"bytes,1,opt,name=log,proto3" json:"log,omitempty"`
|
||||
@@ -1103,7 +1207,7 @@ func (m *LoadResponse) Reset() { *m = LoadResponse{} }
|
||||
func (m *LoadResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*LoadResponse) ProtoMessage() {}
|
||||
func (*LoadResponse) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{19}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{20}
|
||||
}
|
||||
func (m *LoadResponse) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_LoadResponse.Unmarshal(m, b)
|
||||
@@ -1146,7 +1250,7 @@ func (m *PushRequest) Reset() { *m = PushRequest{} }
|
||||
func (m *PushRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*PushRequest) ProtoMessage() {}
|
||||
func (*PushRequest) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{20}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{21}
|
||||
}
|
||||
func (m *PushRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_PushRequest.Unmarshal(m, b)
|
||||
@@ -1199,7 +1303,7 @@ func (m *PushResponse) Reset() { *m = PushResponse{} }
|
||||
func (m *PushResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*PushResponse) ProtoMessage() {}
|
||||
func (*PushResponse) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{21}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{22}
|
||||
}
|
||||
func (m *PushResponse) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_PushResponse.Unmarshal(m, b)
|
||||
@@ -1240,7 +1344,7 @@ func (m *PullRequest) Reset() { *m = PullRequest{} }
|
||||
func (m *PullRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*PullRequest) ProtoMessage() {}
|
||||
func (*PullRequest) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{22}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{23}
|
||||
}
|
||||
func (m *PullRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_PullRequest.Unmarshal(m, b)
|
||||
@@ -1286,7 +1390,7 @@ func (m *PullResponse) Reset() { *m = PullResponse{} }
|
||||
func (m *PullResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*PullResponse) ProtoMessage() {}
|
||||
func (*PullResponse) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{23}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{24}
|
||||
}
|
||||
func (m *PullResponse) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_PullResponse.Unmarshal(m, b)
|
||||
@@ -1323,17 +1427,19 @@ type SaveRequest struct {
|
||||
// path is location for output tarball
|
||||
Path string `protobuf:"bytes,3,opt,name=path,proto3" json:"path,omitempty"`
|
||||
// format is the format of image saved to archive file, such as docker-archive, oci-archive
|
||||
- Format string `protobuf:"bytes,4,opt,name=format,proto3" json:"format,omitempty"`
|
||||
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
- XXX_unrecognized []byte `json:"-"`
|
||||
- XXX_sizecache int32 `json:"-"`
|
||||
+ Format string `protobuf:"bytes,4,opt,name=format,proto3" json:"format,omitempty"`
|
||||
+ // SeparatorSave is the info to save separated image
|
||||
+ Sep *SeparatorSave `protobuf:"bytes,5,opt,name=sep,proto3" json:"sep,omitempty"`
|
||||
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
+ XXX_unrecognized []byte `json:"-"`
|
||||
+ XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *SaveRequest) Reset() { *m = SaveRequest{} }
|
||||
func (m *SaveRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*SaveRequest) ProtoMessage() {}
|
||||
func (*SaveRequest) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{24}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{25}
|
||||
}
|
||||
func (m *SaveRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_SaveRequest.Unmarshal(m, b)
|
||||
@@ -1381,6 +1487,88 @@ func (m *SaveRequest) GetFormat() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
+func (m *SaveRequest) GetSep() *SeparatorSave {
|
||||
+ if m != nil {
|
||||
+ return m.Sep
|
||||
+ }
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+type SeparatorSave struct {
|
||||
+ // base is base image name
|
||||
+ Base string `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"`
|
||||
+ // lib is library image name
|
||||
+ Lib string `protobuf:"bytes,2,opt,name=lib,proto3" json:"lib,omitempty"`
|
||||
+ // rename is rename json file
|
||||
+ Rename string `protobuf:"bytes,3,opt,name=rename,proto3" json:"rename,omitempty"`
|
||||
+ // dest is destination file directory
|
||||
+ Dest string `protobuf:"bytes,4,opt,name=dest,proto3" json:"dest,omitempty"`
|
||||
+ // enabled is flag to indicate the separator function enabled or not
|
||||
+ Enabled bool `protobuf:"varint,5,opt,name=enabled,proto3" json:"enabled,omitempty"`
|
||||
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
+ XXX_unrecognized []byte `json:"-"`
|
||||
+ XXX_sizecache int32 `json:"-"`
|
||||
+}
|
||||
+
|
||||
+func (m *SeparatorSave) Reset() { *m = SeparatorSave{} }
|
||||
+func (m *SeparatorSave) String() string { return proto.CompactTextString(m) }
|
||||
+func (*SeparatorSave) ProtoMessage() {}
|
||||
+func (*SeparatorSave) Descriptor() ([]byte, []int) {
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{26}
|
||||
+}
|
||||
+func (m *SeparatorSave) XXX_Unmarshal(b []byte) error {
|
||||
+ return xxx_messageInfo_SeparatorSave.Unmarshal(m, b)
|
||||
+}
|
||||
+func (m *SeparatorSave) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
+ return xxx_messageInfo_SeparatorSave.Marshal(b, m, deterministic)
|
||||
+}
|
||||
+func (m *SeparatorSave) XXX_Merge(src proto.Message) {
|
||||
+ xxx_messageInfo_SeparatorSave.Merge(m, src)
|
||||
+}
|
||||
+func (m *SeparatorSave) XXX_Size() int {
|
||||
+ return xxx_messageInfo_SeparatorSave.Size(m)
|
||||
+}
|
||||
+func (m *SeparatorSave) XXX_DiscardUnknown() {
|
||||
+ xxx_messageInfo_SeparatorSave.DiscardUnknown(m)
|
||||
+}
|
||||
+
|
||||
+var xxx_messageInfo_SeparatorSave proto.InternalMessageInfo
|
||||
+
|
||||
+func (m *SeparatorSave) GetBase() string {
|
||||
+ if m != nil {
|
||||
+ return m.Base
|
||||
+ }
|
||||
+ return ""
|
||||
+}
|
||||
+
|
||||
+func (m *SeparatorSave) GetLib() string {
|
||||
+ if m != nil {
|
||||
+ return m.Lib
|
||||
+ }
|
||||
+ return ""
|
||||
+}
|
||||
+
|
||||
+func (m *SeparatorSave) GetRename() string {
|
||||
+ if m != nil {
|
||||
+ return m.Rename
|
||||
+ }
|
||||
+ return ""
|
||||
+}
|
||||
+
|
||||
+func (m *SeparatorSave) GetDest() string {
|
||||
+ if m != nil {
|
||||
+ return m.Dest
|
||||
+ }
|
||||
+ return ""
|
||||
+}
|
||||
+
|
||||
+func (m *SeparatorSave) GetEnabled() bool {
|
||||
+ if m != nil {
|
||||
+ return m.Enabled
|
||||
+ }
|
||||
+ return false
|
||||
+}
|
||||
+
|
||||
type SaveResponse struct {
|
||||
// log is log send to cli
|
||||
Log string `protobuf:"bytes,1,opt,name=log,proto3" json:"log,omitempty"`
|
||||
@@ -1393,7 +1581,7 @@ func (m *SaveResponse) Reset() { *m = SaveResponse{} }
|
||||
func (m *SaveResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*SaveResponse) ProtoMessage() {}
|
||||
func (*SaveResponse) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{25}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{27}
|
||||
}
|
||||
func (m *SaveResponse) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_SaveResponse.Unmarshal(m, b)
|
||||
@@ -1438,7 +1626,7 @@ func (m *MemData) Reset() { *m = MemData{} }
|
||||
func (m *MemData) String() string { return proto.CompactTextString(m) }
|
||||
func (*MemData) ProtoMessage() {}
|
||||
func (*MemData) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{26}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{28}
|
||||
}
|
||||
func (m *MemData) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_MemData.Unmarshal(m, b)
|
||||
@@ -1508,7 +1696,7 @@ func (m *MemStat) Reset() { *m = MemStat{} }
|
||||
func (m *MemStat) String() string { return proto.CompactTextString(m) }
|
||||
func (*MemStat) ProtoMessage() {}
|
||||
func (*MemStat) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{27}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{29}
|
||||
}
|
||||
func (m *MemStat) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_MemStat.Unmarshal(m, b)
|
||||
@@ -1584,7 +1772,7 @@ func (m *StorageData) Reset() { *m = StorageData{} }
|
||||
func (m *StorageData) String() string { return proto.CompactTextString(m) }
|
||||
func (*StorageData) ProtoMessage() {}
|
||||
func (*StorageData) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{28}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{30}
|
||||
}
|
||||
func (m *StorageData) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_StorageData.Unmarshal(m, b)
|
||||
@@ -1634,7 +1822,7 @@ func (m *RegistryData) Reset() { *m = RegistryData{} }
|
||||
func (m *RegistryData) String() string { return proto.CompactTextString(m) }
|
||||
func (*RegistryData) ProtoMessage() {}
|
||||
func (*RegistryData) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{29}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{31}
|
||||
}
|
||||
func (m *RegistryData) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_RegistryData.Unmarshal(m, b)
|
||||
@@ -1686,7 +1874,7 @@ func (m *InfoRequest) Reset() { *m = InfoRequest{} }
|
||||
func (m *InfoRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*InfoRequest) ProtoMessage() {}
|
||||
func (*InfoRequest) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{30}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{32}
|
||||
}
|
||||
func (m *InfoRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_InfoRequest.Unmarshal(m, b)
|
||||
@@ -1743,7 +1931,7 @@ func (m *InfoResponse) Reset() { *m = InfoResponse{} }
|
||||
func (m *InfoResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*InfoResponse) ProtoMessage() {}
|
||||
func (*InfoResponse) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{31}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{33}
|
||||
}
|
||||
func (m *InfoResponse) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_InfoResponse.Unmarshal(m, b)
|
||||
@@ -1845,7 +2033,7 @@ func (m *ManifestCreateRequest) Reset() { *m = ManifestCreateRequest{} }
|
||||
func (m *ManifestCreateRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*ManifestCreateRequest) ProtoMessage() {}
|
||||
func (*ManifestCreateRequest) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{32}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{34}
|
||||
}
|
||||
func (m *ManifestCreateRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ManifestCreateRequest.Unmarshal(m, b)
|
||||
@@ -1890,7 +2078,7 @@ func (m *ManifestCreateResponse) Reset() { *m = ManifestCreateResponse{}
|
||||
func (m *ManifestCreateResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*ManifestCreateResponse) ProtoMessage() {}
|
||||
func (*ManifestCreateResponse) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{33}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{35}
|
||||
}
|
||||
func (m *ManifestCreateResponse) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ManifestCreateResponse.Unmarshal(m, b)
|
||||
@@ -1933,7 +2121,7 @@ func (m *ManifestAnnotateRequest) Reset() { *m = ManifestAnnotateRequest
|
||||
func (m *ManifestAnnotateRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*ManifestAnnotateRequest) ProtoMessage() {}
|
||||
func (*ManifestAnnotateRequest) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{34}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{36}
|
||||
}
|
||||
func (m *ManifestAnnotateRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ManifestAnnotateRequest.Unmarshal(m, b)
|
||||
@@ -2006,7 +2194,7 @@ func (m *ManifestInspectRequest) Reset() { *m = ManifestInspectRequest{}
|
||||
func (m *ManifestInspectRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*ManifestInspectRequest) ProtoMessage() {}
|
||||
func (*ManifestInspectRequest) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{35}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{37}
|
||||
}
|
||||
func (m *ManifestInspectRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ManifestInspectRequest.Unmarshal(m, b)
|
||||
@@ -2044,7 +2232,7 @@ func (m *ManifestInspectResponse) Reset() { *m = ManifestInspectResponse
|
||||
func (m *ManifestInspectResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*ManifestInspectResponse) ProtoMessage() {}
|
||||
func (*ManifestInspectResponse) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{36}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{38}
|
||||
}
|
||||
func (m *ManifestInspectResponse) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ManifestInspectResponse.Unmarshal(m, b)
|
||||
@@ -2083,7 +2271,7 @@ func (m *ManifestPushRequest) Reset() { *m = ManifestPushRequest{} }
|
||||
func (m *ManifestPushRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*ManifestPushRequest) ProtoMessage() {}
|
||||
func (*ManifestPushRequest) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{37}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{39}
|
||||
}
|
||||
func (m *ManifestPushRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ManifestPushRequest.Unmarshal(m, b)
|
||||
@@ -2128,7 +2316,7 @@ func (m *ManifestPushResponse) Reset() { *m = ManifestPushResponse{} }
|
||||
func (m *ManifestPushResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*ManifestPushResponse) ProtoMessage() {}
|
||||
func (*ManifestPushResponse) Descriptor() ([]byte, []int) {
|
||||
- return fileDescriptor_d71ef680555cb937, []int{38}
|
||||
+ return fileDescriptor_d71ef680555cb937, []int{40}
|
||||
}
|
||||
func (m *ManifestPushResponse) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ManifestPushResponse.Unmarshal(m, b)
|
||||
@@ -2177,12 +2365,14 @@ func init() {
|
||||
proto.RegisterType((*LogoutRequest)(nil), "isula.build.v1.LogoutRequest")
|
||||
proto.RegisterType((*LogoutResponse)(nil), "isula.build.v1.LogoutResponse")
|
||||
proto.RegisterType((*LoadRequest)(nil), "isula.build.v1.LoadRequest")
|
||||
+ proto.RegisterType((*SeparatorLoad)(nil), "isula.build.v1.SeparatorLoad")
|
||||
proto.RegisterType((*LoadResponse)(nil), "isula.build.v1.LoadResponse")
|
||||
proto.RegisterType((*PushRequest)(nil), "isula.build.v1.PushRequest")
|
||||
proto.RegisterType((*PushResponse)(nil), "isula.build.v1.PushResponse")
|
||||
proto.RegisterType((*PullRequest)(nil), "isula.build.v1.PullRequest")
|
||||
proto.RegisterType((*PullResponse)(nil), "isula.build.v1.PullResponse")
|
||||
proto.RegisterType((*SaveRequest)(nil), "isula.build.v1.SaveRequest")
|
||||
+ proto.RegisterType((*SeparatorSave)(nil), "isula.build.v1.SeparatorSave")
|
||||
proto.RegisterType((*SaveResponse)(nil), "isula.build.v1.SaveResponse")
|
||||
proto.RegisterType((*MemData)(nil), "isula.build.v1.MemData")
|
||||
proto.RegisterType((*MemStat)(nil), "isula.build.v1.MemStat")
|
||||
@@ -2202,124 +2392,133 @@ func init() {
|
||||
func init() { proto.RegisterFile("api/services/control.proto", fileDescriptor_d71ef680555cb937) }
|
||||
|
||||
var fileDescriptor_d71ef680555cb937 = []byte{
|
||||
- // 1861 bytes of a gzipped FileDescriptorProto
|
||||
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x58, 0x5f, 0x73, 0x1c, 0x47,
|
||||
- 0x11, 0xe7, 0xfe, 0xe8, 0x5f, 0xdf, 0xe9, 0xac, 0x9a, 0x18, 0x67, 0xeb, 0xac, 0x24, 0x62, 0x63,
|
||||
- 0x62, 0x61, 0x8a, 0xb3, 0x2c, 0x78, 0x20, 0x14, 0x50, 0xe8, 0x4f, 0x6c, 0x0e, 0x6c, 0x19, 0x56,
|
||||
- 0x4a, 0x28, 0x2a, 0x55, 0xb8, 0x46, 0x77, 0xa3, 0xd3, 0x96, 0x77, 0x77, 0x96, 0x99, 0x59, 0xc5,
|
||||
- 0x07, 0x5f, 0x83, 0x37, 0xe0, 0x33, 0xf0, 0x98, 0x4f, 0xc0, 0x13, 0x5f, 0x8a, 0xea, 0xf9, 0xb3,
|
||||
- 0x3b, 0x7b, 0x7b, 0x27, 0x39, 0x6f, 0xfb, 0xeb, 0xee, 0xe9, 0xee, 0xe9, 0xee, 0xe9, 0xe9, 0x59,
|
||||
- 0x18, 0xd2, 0x3c, 0x7e, 0x2a, 0x99, 0xb8, 0x89, 0x27, 0x4c, 0x3e, 0x9d, 0xf0, 0x4c, 0x09, 0x9e,
|
||||
- 0x8c, 0x72, 0xc1, 0x15, 0x27, 0x83, 0x58, 0x16, 0x09, 0x1d, 0x5d, 0x16, 0x71, 0x32, 0x1d, 0xdd,
|
||||
- 0x3c, 0x1b, 0x3e, 0x9c, 0x71, 0x3e, 0x4b, 0xd8, 0x53, 0xcd, 0xbd, 0x2c, 0xae, 0x9e, 0xb2, 0x34,
|
||||
- 0x57, 0x73, 0x23, 0x3c, 0xfc, 0x64, 0x91, 0xa9, 0xe2, 0x94, 0x49, 0x45, 0xd3, 0xdc, 0x08, 0x84,
|
||||
- 0xff, 0xed, 0x40, 0xff, 0x18, 0x55, 0x45, 0xec, 0xaf, 0x05, 0x93, 0x8a, 0x04, 0xb0, 0xa1, 0x55,
|
||||
- 0x8f, 0x4f, 0x83, 0xd6, 0x5e, 0x6b, 0x7f, 0x2b, 0x72, 0x90, 0xec, 0xc2, 0x96, 0xfe, 0xbc, 0x98,
|
||||
- 0xe7, 0x2c, 0x68, 0x6b, 0x5e, 0x45, 0x20, 0x1f, 0x03, 0xa0, 0x9f, 0xec, 0x9d, 0x3a, 0x8d, 0x45,
|
||||
- 0xd0, 0xd1, 0x6c, 0x8f, 0x42, 0xf6, 0xa0, 0x77, 0x15, 0x27, 0xec, 0x04, 0x29, 0x99, 0x0a, 0xba,
|
||||
- 0x5a, 0xc0, 0x27, 0x91, 0x07, 0xb0, 0xce, 0x0b, 0x95, 0x17, 0x2a, 0x58, 0xd3, 0x4c, 0x8b, 0x4a,
|
||||
- 0xbb, 0x47, 0x62, 0x26, 0x83, 0xf5, 0xbd, 0x4e, 0x69, 0x17, 0x09, 0xe4, 0x3e, 0xac, 0xe5, 0x82,
|
||||
- 0xbf, 0x9b, 0x07, 0x1b, 0x7b, 0xad, 0xfd, 0xcd, 0xc8, 0x00, 0xdc, 0x45, 0x1c, 0x4f, 0x51, 0x7b,
|
||||
- 0xb0, 0x69, 0x76, 0x61, 0x21, 0xf9, 0x15, 0xf4, 0xf4, 0xe2, 0x73, 0x45, 0x55, 0x3c, 0x09, 0xb6,
|
||||
- 0xf6, 0x5a, 0xfb, 0xbd, 0xc3, 0x87, 0xa3, 0x7a, 0x50, 0x47, 0xc7, 0x95, 0x48, 0xe4, 0xcb, 0x93,
|
||||
- 0x47, 0xb0, 0x4d, 0xa7, 0xd3, 0x58, 0xc5, 0x3c, 0xa3, 0xc9, 0x05, 0x9d, 0x05, 0xa0, 0xd5, 0xd7,
|
||||
- 0x89, 0x3a, 0x18, 0x34, 0x3f, 0x9a, 0x4e, 0x5f, 0xc6, 0x52, 0x05, 0x3d, 0xed, 0xb3, 0x47, 0x21,
|
||||
- 0x43, 0xd8, 0x64, 0x99, 0x8a, 0xd5, 0x7c, 0x7c, 0x1a, 0xf4, 0xb5, 0x82, 0x12, 0xe3, 0x76, 0x59,
|
||||
- 0x36, 0x11, 0xf3, 0x5c, 0xb1, 0x69, 0xb0, 0xad, 0x37, 0x55, 0x11, 0x30, 0x48, 0x57, 0x5c, 0xa4,
|
||||
- 0x54, 0x05, 0x03, 0x13, 0x24, 0x83, 0x42, 0x0a, 0xdb, 0xe3, 0x34, 0xe7, 0x42, 0xb9, 0x3c, 0x0e,
|
||||
- 0x61, 0x33, 0xd6, 0x84, 0x32, 0x91, 0x25, 0x46, 0x25, 0x92, 0x17, 0x62, 0xe2, 0xd2, 0x68, 0x11,
|
||||
- 0x9a, 0x16, 0xec, 0x8a, 0x09, 0x96, 0x4d, 0x98, 0x4d, 0x61, 0x45, 0x08, 0x43, 0x18, 0x38, 0x13,
|
||||
- 0x32, 0xe7, 0x99, 0x64, 0x64, 0x07, 0x3a, 0x09, 0x9f, 0x59, 0xf5, 0xf8, 0x19, 0xbe, 0x80, 0x9e,
|
||||
- 0x17, 0x3a, 0xf2, 0x73, 0x57, 0x32, 0x71, 0xca, 0xb4, 0x58, 0xef, 0x70, 0x38, 0x32, 0x25, 0x39,
|
||||
- 0x72, 0x25, 0x39, 0xba, 0x70, 0x25, 0x19, 0x55, 0xc2, 0xe1, 0x8f, 0x60, 0xdb, 0x96, 0xa5, 0xb5,
|
||||
- 0x85, 0x19, 0x4d, 0xe9, 0x8c, 0x55, 0x75, 0x69, 0x21, 0x8a, 0xa2, 0xb9, 0x42, 0xde, 0x59, 0xc2,
|
||||
- 0xe1, 0x13, 0x18, 0x38, 0xd1, 0x4a, 0xed, 0xc4, 0x96, 0xa4, 0x95, 0xb5, 0x30, 0xfc, 0x31, 0xf4,
|
||||
- 0x30, 0x57, 0x4e, 0xe9, 0x2e, 0x6c, 0x69, 0x83, 0x67, 0xd4, 0x6e, 0x65, 0x2b, 0xaa, 0x08, 0xe1,
|
||||
- 0xcf, 0x00, 0x2e, 0xe8, 0xcc, 0xc9, 0xde, 0x87, 0x35, 0xcd, 0xb2, 0x72, 0x06, 0x60, 0xb4, 0x14,
|
||||
- 0x9d, 0xd9, 0x90, 0xe3, 0x67, 0xf8, 0xbf, 0x16, 0xf4, 0x8d, 0x0d, 0xeb, 0xcd, 0xaf, 0x61, 0x5d,
|
||||
- 0xcb, 0xca, 0xa0, 0xb5, 0xd7, 0xd9, 0xef, 0x1d, 0x7e, 0xb6, 0x58, 0x97, 0xbe, 0xf4, 0x68, 0xac,
|
||||
- 0x03, 0x90, 0x5d, 0xf1, 0xc8, 0xae, 0x1a, 0xfe, 0x1d, 0xb6, 0x4a, 0x22, 0x16, 0xa1, 0x60, 0x39,
|
||||
- 0x97, 0xb1, 0xe2, 0x62, 0x6e, 0x5d, 0xf1, 0x28, 0x4d, 0x7f, 0xc8, 0x00, 0xda, 0xf1, 0xd4, 0x26,
|
||||
- 0xbe, 0x1d, 0x4f, 0x75, 0x70, 0x04, 0xa3, 0x58, 0x88, 0x5d, 0x1b, 0x1c, 0x03, 0x09, 0x81, 0xae,
|
||||
- 0x8c, 0xff, 0xc6, 0xec, 0x49, 0xd5, 0xdf, 0xe1, 0xbf, 0x5b, 0x70, 0xef, 0x2b, 0x26, 0x64, 0xcc,
|
||||
- 0x33, 0x3f, 0xbc, 0x37, 0x86, 0xe4, 0xc2, 0x6b, 0x21, 0xc6, 0x73, 0xc6, 0xad, 0xb8, 0xeb, 0x26,
|
||||
- 0x25, 0x41, 0x73, 0x63, 0x75, 0xc2, 0xd3, 0x34, 0x56, 0xae, 0x12, 0x4b, 0x42, 0xd5, 0x89, 0xb0,
|
||||
- 0xac, 0xba, 0x7e, 0x27, 0x8a, 0x53, 0xa6, 0xfb, 0x88, 0x3c, 0x12, 0x93, 0xeb, 0xb2, 0x8f, 0x68,
|
||||
- 0x14, 0xfe, 0x11, 0xb6, 0x23, 0x96, 0xf2, 0x1b, 0xe6, 0xd5, 0x49, 0x55, 0x52, 0x1d, 0xaf, 0xa4,
|
||||
- 0x30, 0x34, 0x34, 0x49, 0xb4, 0x5b, 0x9b, 0x11, 0x7e, 0x9a, 0x36, 0x53, 0x64, 0xe6, 0x58, 0xe8,
|
||||
- 0x36, 0x53, 0x64, 0x98, 0xf6, 0x81, 0x53, 0x69, 0x37, 0x1c, 0x42, 0x3f, 0xa1, 0x73, 0x26, 0x5e,
|
||||
- 0x31, 0x29, 0xab, 0x0a, 0xa8, 0xd1, 0xc2, 0x7f, 0xb5, 0xe0, 0x83, 0xdf, 0x32, 0x9a, 0xa8, 0xeb,
|
||||
- 0x93, 0x6b, 0x36, 0x79, 0x5b, 0xae, 0x1d, 0xc3, 0xba, 0xd4, 0xd5, 0xa9, 0x57, 0x0d, 0x0e, 0x9f,
|
||||
- 0x2d, 0x66, 0x7f, 0xc9, 0xa2, 0xd1, 0x39, 0xde, 0x12, 0xd9, 0xcc, 0x96, 0xb5, 0x55, 0x10, 0xfe,
|
||||
- 0x02, 0xb6, 0x6b, 0x0c, 0xd2, 0x83, 0x8d, 0x2f, 0xcf, 0x7e, 0x7f, 0xf6, 0xfa, 0x4f, 0x67, 0x3b,
|
||||
- 0xdf, 0x43, 0x70, 0xfe, 0x45, 0xf4, 0xd5, 0xf8, 0xec, 0xc5, 0x4e, 0x8b, 0xdc, 0x83, 0xde, 0xd9,
|
||||
- 0xeb, 0x8b, 0x37, 0x8e, 0xd0, 0x0e, 0xff, 0x02, 0xfd, 0x97, 0x7c, 0x16, 0x67, 0x2e, 0x4c, 0xd8,
|
||||
- 0x2d, 0x98, 0xb8, 0x61, 0xc2, 0x6e, 0xc6, 0x22, 0xec, 0x30, 0x85, 0x64, 0x22, 0xc3, 0x03, 0x61,
|
||||
- 0x12, 0x58, 0x62, 0xe4, 0xe5, 0x54, 0xca, 0x6f, 0xb8, 0x70, 0xf5, 0x54, 0x62, 0x3c, 0xaf, 0x56,
|
||||
- 0xff, 0x9d, 0x67, 0xf0, 0x73, 0x2d, 0xca, 0x0b, 0x75, 0x97, 0x2f, 0x8d, 0x84, 0x85, 0xfb, 0x30,
|
||||
- 0x70, 0x4b, 0xad, 0x99, 0x07, 0xb0, 0x2e, 0x98, 0x2c, 0x12, 0x67, 0xc5, 0xa2, 0xf0, 0x07, 0xd0,
|
||||
- 0x7b, 0xc9, 0x69, 0x79, 0x01, 0x12, 0xe8, 0xe6, 0x54, 0x5d, 0x5b, 0x21, 0xfd, 0x1d, 0xee, 0x61,
|
||||
- 0x48, 0xe8, 0xf4, 0x96, 0xc6, 0xf7, 0x35, 0xf4, 0xfe, 0x50, 0xc8, 0x6b, 0xcf, 0xcf, 0xbc, 0x90,
|
||||
- 0xd7, 0x65, 0x07, 0xb2, 0xa8, 0xde, 0x45, 0xda, 0x0b, 0x5d, 0xc4, 0x6b, 0xee, 0x9d, 0x5a, 0x73,
|
||||
- 0x7f, 0x02, 0x7d, 0xa3, 0xdc, 0x9a, 0x1f, 0xc2, 0xa6, 0xb0, 0xdf, 0xae, 0xb7, 0x3b, 0x1c, 0x9e,
|
||||
- 0xa0, 0x23, 0x49, 0x52, 0x73, 0x24, 0x49, 0x7c, 0x47, 0x10, 0xdd, 0xee, 0x88, 0x31, 0x88, 0x4a,
|
||||
- 0xde, 0xc3, 0x60, 0x0c, 0xbd, 0x73, 0x5a, 0x1d, 0x2a, 0xcc, 0x10, 0xbd, 0xa9, 0xda, 0xb4, 0x45,
|
||||
- 0x48, 0xb7, 0xad, 0xad, 0xad, 0xcf, 0x9a, 0x45, 0x65, 0xb8, 0x3b, 0x55, 0xb8, 0xbd, 0x38, 0x74,
|
||||
- 0x6b, 0x71, 0xd8, 0x83, 0xbe, 0x31, 0xb5, 0x32, 0x0d, 0x73, 0xd8, 0x78, 0xc5, 0xd2, 0x53, 0xaa,
|
||||
- 0x28, 0xfa, 0x9c, 0xb2, 0xf4, 0x82, 0x2b, 0x9a, 0x68, 0x89, 0x4e, 0x54, 0x62, 0xac, 0xb8, 0x94,
|
||||
- 0xa5, 0xcf, 0x05, 0x33, 0x7b, 0xef, 0x44, 0x0e, 0x62, 0x5c, 0xe4, 0x37, 0x34, 0x37, 0xcb, 0x3a,
|
||||
- 0x9a, 0x57, 0x11, 0x50, 0x27, 0x02, 0xbd, 0xb0, 0x6b, 0x74, 0x3a, 0x1c, 0x7e, 0xdb, 0xd2, 0xb6,
|
||||
- 0xf1, 0xbc, 0xe1, 0x06, 0x52, 0x96, 0x9e, 0xcf, 0xcd, 0x49, 0xee, 0x46, 0x16, 0xa1, 0xdd, 0x6b,
|
||||
- 0x46, 0x73, 0x64, 0xb4, 0x35, 0xc3, 0x41, 0xb4, 0x8b, 0x9f, 0x47, 0x49, 0xc2, 0x27, 0xda, 0x6e,
|
||||
- 0x37, 0xaa, 0x08, 0x8e, 0x3b, 0xce, 0xbe, 0x94, 0xc6, 0xb0, 0xe5, 0x6a, 0x02, 0x7a, 0xa5, 0xc1,
|
||||
- 0x34, 0x31, 0x0d, 0xb9, 0x1b, 0x95, 0x18, 0xfb, 0x11, 0x7e, 0x47, 0x2c, 0x61, 0x54, 0xb2, 0x69,
|
||||
- 0xb0, 0xae, 0xf9, 0x35, 0x5a, 0xf8, 0x06, 0x7a, 0xe7, 0x8a, 0x0b, 0x3a, 0x63, 0x3a, 0x70, 0x8f,
|
||||
- 0x60, 0x5b, 0x5a, 0x28, 0xe2, 0xea, 0xa8, 0xd5, 0x89, 0xe4, 0x09, 0xec, 0x58, 0xc2, 0x31, 0x9d,
|
||||
- 0xbc, 0x8d, 0xb3, 0xd9, 0x73, 0x69, 0xeb, 0xa8, 0x41, 0x0f, 0xff, 0xd1, 0x82, 0x7e, 0xc4, 0x66,
|
||||
- 0xb1, 0x54, 0x62, 0xae, 0x4d, 0x3c, 0x81, 0x1d, 0x61, 0x70, 0xcc, 0xe4, 0x39, 0xa3, 0xd8, 0xac,
|
||||
- 0x4d, 0x0b, 0x6e, 0xd0, 0xc9, 0x08, 0x48, 0x45, 0x1b, 0x67, 0x92, 0x4d, 0x0a, 0xc1, 0x6c, 0x11,
|
||||
- 0x2d, 0xe1, 0x90, 0x7d, 0xb8, 0x57, 0x51, 0x8f, 0x13, 0x3e, 0x79, 0x1b, 0x74, 0xb4, 0xf0, 0x22,
|
||||
- 0x39, 0x7c, 0x0c, 0x3d, 0x7d, 0x7b, 0x56, 0xd7, 0xc1, 0x0d, 0x13, 0x97, 0xdc, 0xd6, 0xf8, 0x66,
|
||||
- 0xe4, 0x60, 0xf8, 0x9f, 0x0e, 0xf4, 0x8d, 0xa4, 0x2d, 0xbc, 0x67, 0xba, 0x7e, 0x90, 0x64, 0xa7,
|
||||
- 0x9a, 0x0f, 0x17, 0x5b, 0xb5, 0xad, 0xc2, 0xc8, 0xc9, 0xe1, 0xdc, 0x69, 0xe3, 0xa2, 0x97, 0xb5,
|
||||
- 0x97, 0xcf, 0x9d, 0x5e, 0x1e, 0x22, 0x5f, 0x9e, 0xfc, 0x06, 0xfa, 0xd6, 0xfd, 0xb9, 0x5e, 0xdf,
|
||||
- 0xd1, 0xeb, 0x77, 0x17, 0xd7, 0xfb, 0x51, 0x8e, 0x6a, 0x2b, 0xb0, 0x4a, 0xa6, 0x48, 0xe5, 0xdc,
|
||||
- 0x1d, 0xab, 0x12, 0xe3, 0xd6, 0x45, 0x91, 0x69, 0x96, 0xb9, 0x33, 0x1d, 0xc4, 0x21, 0xe2, 0xf5,
|
||||
- 0xc9, 0x38, 0x2a, 0x32, 0x7c, 0x38, 0xe8, 0xea, 0xd9, 0x8a, 0x3c, 0x0a, 0xf2, 0xb5, 0x71, 0x26,
|
||||
- 0xce, 0x8a, 0x54, 0xcf, 0xe0, 0x9d, 0xc8, 0xa3, 0x20, 0x7f, 0xc6, 0x23, 0x5e, 0xa8, 0x38, 0x63,
|
||||
- 0x52, 0xcf, 0xe2, 0x9d, 0xc8, 0xa3, 0xd8, 0x48, 0xe2, 0xa1, 0xb1, 0xa3, 0xf8, 0xb2, 0x48, 0x22,
|
||||
- 0x3b, 0x72, 0x72, 0x58, 0xd2, 0xec, 0x5d, 0xce, 0x44, 0x9c, 0xb2, 0x0c, 0x4f, 0x29, 0xe8, 0x64,
|
||||
- 0xd5, 0x68, 0xe1, 0x9f, 0xe1, 0xfb, 0xaf, 0x68, 0x16, 0x5f, 0x31, 0xa9, 0x4e, 0xf4, 0xc8, 0xe2,
|
||||
- 0x92, 0x1c, 0x42, 0x3f, 0xb5, 0x0c, 0x3d, 0x9b, 0xdb, 0xfb, 0xd9, 0xa7, 0xe1, 0x69, 0x73, 0xd8,
|
||||
- 0x75, 0xab, 0x8a, 0x10, 0x1e, 0xc2, 0x83, 0x45, 0xd5, 0x77, 0x8e, 0xa8, 0xdf, 0xb6, 0xe0, 0x43,
|
||||
- 0xb7, 0xe8, 0x28, 0xcb, 0xb8, 0xfa, 0x8e, 0x1e, 0x61, 0x2f, 0xb3, 0xd8, 0x5d, 0xb5, 0x0e, 0x63,
|
||||
- 0x03, 0xd5, 0xe7, 0xc7, 0x36, 0x50, 0x7d, 0x66, 0x06, 0xd0, 0xe6, 0xd2, 0x66, 0xb9, 0xcd, 0x25,
|
||||
- 0x66, 0x81, 0xcb, 0xe7, 0x8c, 0xaa, 0x42, 0x30, 0x19, 0xac, 0x99, 0xf7, 0x48, 0x45, 0xd1, 0xa5,
|
||||
- 0x4f, 0x45, 0x4c, 0x33, 0x65, 0x53, 0xec, 0x60, 0xf8, 0xcb, 0x6a, 0xb7, 0xe3, 0x4c, 0xe6, 0x6c,
|
||||
- 0xa2, 0xbe, 0x83, 0xdf, 0xe1, 0x4f, 0xaa, 0x6d, 0x97, 0xab, 0x6d, 0xb0, 0x08, 0x74, 0xb1, 0xfc,
|
||||
- 0xf4, 0xb2, 0x7e, 0xa4, 0xbf, 0xc3, 0x57, 0xf0, 0x81, 0x13, 0xf7, 0x2f, 0xd3, 0xf7, 0x89, 0x10,
|
||||
- 0xaa, 0xab, 0xa2, 0xa3, 0xbf, 0xc3, 0x11, 0xdc, 0xaf, 0xab, 0xbb, 0x7d, 0x10, 0x38, 0xfc, 0x67,
|
||||
- 0x0f, 0x36, 0x4e, 0xcc, 0x5b, 0x9b, 0x9c, 0xc2, 0x9a, 0x7e, 0x7f, 0x90, 0xdd, 0xa5, 0x4f, 0x43,
|
||||
- 0xeb, 0xda, 0xf0, 0xa3, 0x15, 0xdc, 0x6a, 0xa2, 0xb3, 0xf3, 0xd7, 0x47, 0xcd, 0x93, 0xee, 0x3d,
|
||||
- 0x59, 0x86, 0x1f, 0xaf, 0x62, 0x1b, 0x45, 0x07, 0x2d, 0x72, 0x04, 0x5d, 0xbd, 0xd1, 0x87, 0xcb,
|
||||
- 0x9f, 0x04, 0x46, 0xcd, 0xee, 0x6d, 0xef, 0x05, 0x72, 0x0c, 0x1b, 0x6e, 0xbe, 0x7e, 0xd0, 0x78,
|
||||
- 0x85, 0x7d, 0x91, 0xe6, 0x6a, 0x3e, 0xfc, 0x64, 0x51, 0xc1, 0xe2, 0x40, 0x7f, 0x02, 0x5d, 0x8c,
|
||||
- 0x65, 0xd3, 0x0d, 0x2f, 0x61, 0x4d, 0x37, 0xfc, 0xf0, 0x1f, 0xb4, 0x8c, 0x92, 0x24, 0x59, 0xa6,
|
||||
- 0xa4, 0x9c, 0x5c, 0x96, 0x29, 0xa9, 0x26, 0x92, 0x83, 0x16, 0xc6, 0xd6, 0xcc, 0xde, 0xcd, 0xd8,
|
||||
- 0xd6, 0xc6, 0xfc, 0x66, 0x6c, 0xeb, 0x23, 0xfb, 0x41, 0x8b, 0xfc, 0x0e, 0x7a, 0xde, 0x68, 0xbd,
|
||||
- 0x32, 0x38, 0x9f, 0xbe, 0xc7, 0x3c, 0x8e, 0x85, 0xa3, 0xa7, 0xdb, 0x66, 0xe1, 0xf8, 0x43, 0x75,
|
||||
- 0xb3, 0x70, 0xea, 0x23, 0xf1, 0x0b, 0x58, 0x37, 0xd3, 0x2b, 0x59, 0x26, 0x58, 0x0d, 0xc4, 0xcd,
|
||||
- 0xcd, 0x2d, 0x0c, 0xbd, 0x27, 0xd0, 0xc5, 0xc9, 0x75, 0x49, 0xd9, 0x54, 0x23, 0xef, 0x92, 0xb2,
|
||||
- 0xf1, 0x86, 0x5d, 0x13, 0x6a, 0xf3, 0xf2, 0x6f, 0x7a, 0x53, 0xfb, 0xe9, 0xd0, 0xf4, 0xa6, 0xfe,
|
||||
- 0xc3, 0xe0, 0xa0, 0x45, 0x3e, 0x87, 0xce, 0x05, 0x9d, 0x91, 0xe1, 0xa2, 0x60, 0xf5, 0x7a, 0x1e,
|
||||
- 0xae, 0x08, 0x3f, 0x6e, 0x05, 0xa7, 0xbf, 0xe6, 0x56, 0xbc, 0xf1, 0xb3, 0xb9, 0x15, 0x7f, 0x60,
|
||||
- 0x34, 0xc7, 0x48, 0xdf, 0x86, 0x0d, 0x25, 0xde, 0x24, 0xd0, 0x54, 0x52, 0xbb, 0xfc, 0xdf, 0xc0,
|
||||
- 0xa0, 0x7e, 0x01, 0x90, 0x1f, 0x36, 0xee, 0xac, 0x65, 0x77, 0xcf, 0xf0, 0xb3, 0xbb, 0xc4, 0xac,
|
||||
- 0x81, 0x73, 0xd8, 0x59, 0xbc, 0x2c, 0xc8, 0xe3, 0x55, 0x6b, 0x17, 0xae, 0x93, 0x95, 0xd1, 0xbb,
|
||||
- 0x84, 0x7b, 0x0b, 0xad, 0x98, 0xac, 0xf4, 0xa7, 0xde, 0xe9, 0x87, 0x8f, 0xef, 0x94, 0xb3, 0x8e,
|
||||
- 0x7f, 0x0d, 0x7d, 0xbf, 0xe1, 0x92, 0x4f, 0x57, 0x2d, 0xf4, 0x9b, 0xc5, 0xa3, 0xdb, 0x85, 0x5c,
|
||||
- 0xe6, 0x2e, 0xd7, 0xf5, 0x86, 0x7e, 0xfa, 0xff, 0x00, 0x00, 0x00, 0xff, 0xff, 0x16, 0xc7, 0x54,
|
||||
- 0xaa, 0x1c, 0x15, 0x00, 0x00,
|
||||
+ // 2001 bytes of a gzipped FileDescriptorProto
|
||||
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x58, 0xdd, 0x73, 0x1c, 0x47,
|
||||
+ 0x11, 0x67, 0xef, 0x4e, 0x5f, 0x7d, 0xa7, 0xb3, 0x6a, 0x63, 0x9c, 0xab, 0xb3, 0x92, 0xa8, 0x36,
|
||||
+ 0x26, 0x16, 0xa6, 0x38, 0xc9, 0x82, 0x07, 0x42, 0x01, 0x85, 0x3e, 0x62, 0x73, 0x60, 0xcb, 0xb0,
|
||||
+ 0x52, 0x42, 0x51, 0xa9, 0xc2, 0x35, 0xba, 0x1b, 0x9d, 0x16, 0xef, 0xee, 0x2c, 0x33, 0xb3, 0x8a,
|
||||
+ 0x8f, 0xfc, 0x15, 0x54, 0xf1, 0xc4, 0xc7, 0xdf, 0xc0, 0x63, 0xfe, 0x02, 0x9e, 0xf8, 0xa7, 0x52,
|
||||
+ 0xdd, 0x33, 0xb3, 0x1f, 0xb7, 0x27, 0xc9, 0x79, 0xdb, 0xfe, 0x98, 0x9e, 0x9e, 0xee, 0xdf, 0x74,
|
||||
+ 0x4f, 0x2f, 0x0c, 0x59, 0x16, 0xed, 0x29, 0x2e, 0xaf, 0xa3, 0x09, 0x57, 0x7b, 0x13, 0x91, 0x6a,
|
||||
+ 0x29, 0xe2, 0x51, 0x26, 0x85, 0x16, 0x7e, 0x3f, 0x52, 0x79, 0xcc, 0x46, 0x17, 0x79, 0x14, 0x4f,
|
||||
+ 0x47, 0xd7, 0x4f, 0x87, 0x0f, 0x67, 0x42, 0xcc, 0x62, 0xbe, 0x47, 0xd2, 0x8b, 0xfc, 0x72, 0x8f,
|
||||
+ 0x27, 0x99, 0x9e, 0x1b, 0xe5, 0xe1, 0x47, 0x8b, 0x42, 0x1d, 0x25, 0x5c, 0x69, 0x96, 0x64, 0x46,
|
||||
+ 0x21, 0xf8, 0x5f, 0x1b, 0x7a, 0x47, 0x68, 0x2a, 0xe4, 0x7f, 0xcd, 0xb9, 0xd2, 0xfe, 0x00, 0xd6,
|
||||
+ 0xc8, 0xf4, 0xf8, 0x64, 0xe0, 0xed, 0x78, 0xbb, 0x1b, 0xa1, 0x23, 0xfd, 0x6d, 0xd8, 0xa0, 0xcf,
|
||||
+ 0xf3, 0x79, 0xc6, 0x07, 0x2d, 0x92, 0x95, 0x0c, 0xff, 0x43, 0x00, 0xf4, 0x93, 0xbf, 0xd5, 0x27,
|
||||
+ 0x91, 0x1c, 0xb4, 0x49, 0x5c, 0xe1, 0xf8, 0x3b, 0xd0, 0xbd, 0x8c, 0x62, 0x7e, 0x8c, 0x9c, 0x54,
|
||||
+ 0x0f, 0x3a, 0xa4, 0x50, 0x65, 0xf9, 0x0f, 0x60, 0x55, 0xe4, 0x3a, 0xcb, 0xf5, 0x60, 0x85, 0x84,
|
||||
+ 0x96, 0x2a, 0xf6, 0x3d, 0x94, 0x33, 0x35, 0x58, 0xdd, 0x69, 0x17, 0xfb, 0x22, 0xc3, 0xbf, 0x0f,
|
||||
+ 0x2b, 0x99, 0x14, 0x6f, 0xe7, 0x83, 0xb5, 0x1d, 0x6f, 0x77, 0x3d, 0x34, 0x04, 0x9e, 0x22, 0x8a,
|
||||
+ 0xa6, 0x68, 0x7d, 0xb0, 0x6e, 0x4e, 0x61, 0x49, 0xff, 0x97, 0xd0, 0xa5, 0xc5, 0x67, 0x9a, 0xe9,
|
||||
+ 0x68, 0x32, 0xd8, 0xd8, 0xf1, 0x76, 0xbb, 0x07, 0x0f, 0x47, 0xf5, 0xa0, 0x8e, 0x8e, 0x4a, 0x95,
|
||||
+ 0xb0, 0xaa, 0xef, 0x3f, 0x82, 0x4d, 0x36, 0x9d, 0x46, 0x3a, 0x12, 0x29, 0x8b, 0xcf, 0xd9, 0x6c,
|
||||
+ 0x00, 0x64, 0xbe, 0xce, 0xa4, 0x60, 0xb0, 0xec, 0x70, 0x3a, 0x7d, 0x11, 0x29, 0x3d, 0xe8, 0x92,
|
||||
+ 0xcf, 0x15, 0x8e, 0x3f, 0x84, 0x75, 0x9e, 0xea, 0x48, 0xcf, 0xc7, 0x27, 0x83, 0x1e, 0x19, 0x28,
|
||||
+ 0x68, 0x3c, 0x2e, 0x4f, 0x27, 0x72, 0x9e, 0x69, 0x3e, 0x1d, 0x6c, 0xd2, 0xa1, 0x4a, 0x06, 0x06,
|
||||
+ 0xe9, 0x52, 0xc8, 0x84, 0xe9, 0x41, 0xdf, 0x04, 0xc9, 0x50, 0x01, 0x83, 0xcd, 0x71, 0x92, 0x09,
|
||||
+ 0xa9, 0x5d, 0x1e, 0x87, 0xb0, 0x1e, 0x11, 0xa3, 0x48, 0x64, 0x41, 0xa3, 0x11, 0x25, 0x72, 0x39,
|
||||
+ 0x71, 0x69, 0xb4, 0x14, 0x6e, 0x2d, 0xf9, 0x25, 0x97, 0x3c, 0x9d, 0x70, 0x9b, 0xc2, 0x92, 0x11,
|
||||
+ 0x04, 0xd0, 0x77, 0x5b, 0xa8, 0x4c, 0xa4, 0x8a, 0xfb, 0x5b, 0xd0, 0x8e, 0xc5, 0xcc, 0x9a, 0xc7,
|
||||
+ 0xcf, 0xe0, 0x39, 0x74, 0x2b, 0xa1, 0xf3, 0x7f, 0xe6, 0x20, 0x13, 0x25, 0x9c, 0xd4, 0xba, 0x07,
|
||||
+ 0xc3, 0x91, 0x81, 0xe4, 0xc8, 0x41, 0x72, 0x74, 0xee, 0x20, 0x19, 0x96, 0xca, 0xc1, 0x0f, 0x61,
|
||||
+ 0xd3, 0xc2, 0xd2, 0xee, 0x85, 0x19, 0x4d, 0xd8, 0x8c, 0x97, 0xb8, 0xb4, 0x24, 0xaa, 0xe2, 0x76,
|
||||
+ 0xb9, 0xba, 0x13, 0xc2, 0xc1, 0x13, 0xe8, 0x3b, 0xd5, 0xd2, 0xec, 0xc4, 0x42, 0xd2, 0xea, 0x5a,
|
||||
+ 0x32, 0xf8, 0x11, 0x74, 0x31, 0x57, 0xce, 0xe8, 0x36, 0x6c, 0xd0, 0x86, 0xa7, 0xcc, 0x1e, 0x65,
|
||||
+ 0x23, 0x2c, 0x19, 0xc1, 0x4f, 0x01, 0xce, 0xd9, 0xcc, 0xe9, 0xde, 0x87, 0x15, 0x12, 0x59, 0x3d,
|
||||
+ 0x43, 0x60, 0xb4, 0x34, 0x9b, 0xd9, 0x90, 0xe3, 0x67, 0xf0, 0x7f, 0x0f, 0x7a, 0x66, 0x0f, 0xeb,
|
||||
+ 0xcd, 0xaf, 0x60, 0x95, 0x74, 0xd5, 0xc0, 0xdb, 0x69, 0xef, 0x76, 0x0f, 0x3e, 0x59, 0xc4, 0x65,
|
||||
+ 0x55, 0x7b, 0x34, 0xa6, 0x00, 0xa4, 0x97, 0x22, 0xb4, 0xab, 0x86, 0x5f, 0xc3, 0x46, 0xc1, 0x44,
|
||||
+ 0x10, 0x4a, 0x9e, 0x09, 0x15, 0x69, 0x21, 0xe7, 0xd6, 0x95, 0x0a, 0xa7, 0xe9, 0x8f, 0xdf, 0x87,
|
||||
+ 0x56, 0x34, 0xb5, 0x89, 0x6f, 0x45, 0x53, 0x0a, 0x8e, 0xe4, 0x0c, 0x81, 0xd8, 0xb1, 0xc1, 0x31,
|
||||
+ 0xa4, 0xef, 0x43, 0x47, 0x45, 0x7f, 0xe3, 0xf6, 0xa6, 0xd2, 0x77, 0xf0, 0x1f, 0x0f, 0xee, 0x7d,
|
||||
+ 0xc1, 0xa5, 0x8a, 0x44, 0x5a, 0x0d, 0xef, 0xb5, 0x61, 0xb9, 0xf0, 0x5a, 0x12, 0xe3, 0x39, 0x13,
|
||||
+ 0x56, 0xdd, 0x55, 0x93, 0x82, 0x41, 0xd2, 0x48, 0x1f, 0x8b, 0x24, 0x89, 0xb4, 0x43, 0x62, 0xc1,
|
||||
+ 0x28, 0x2b, 0x11, 0xc2, 0xaa, 0x53, 0xad, 0x44, 0x51, 0xc2, 0xa9, 0x8e, 0xa8, 0x43, 0x39, 0xb9,
|
||||
+ 0x2a, 0xea, 0x08, 0x51, 0xc1, 0x1f, 0x60, 0x33, 0xe4, 0x89, 0xb8, 0xe6, 0x15, 0x9c, 0x94, 0x90,
|
||||
+ 0x6a, 0x57, 0x20, 0x85, 0xa1, 0x61, 0x71, 0x4c, 0x6e, 0xad, 0x87, 0xf8, 0x69, 0xca, 0x4c, 0x9e,
|
||||
+ 0x9a, 0x6b, 0x41, 0x65, 0x26, 0x4f, 0x31, 0xed, 0x7d, 0x67, 0xd2, 0x1e, 0x38, 0x80, 0x5e, 0xcc,
|
||||
+ 0xe6, 0x5c, 0xbe, 0xe4, 0x4a, 0x95, 0x08, 0xa8, 0xf1, 0x82, 0x7f, 0x7b, 0xf0, 0xde, 0x6f, 0x38,
|
||||
+ 0x8b, 0xf5, 0xd5, 0xf1, 0x15, 0x9f, 0xbc, 0x29, 0xd6, 0x8e, 0x61, 0x55, 0x11, 0x3a, 0x69, 0x55,
|
||||
+ 0xff, 0xe0, 0xe9, 0x62, 0xf6, 0x97, 0x2c, 0x1a, 0x9d, 0x61, 0x97, 0x48, 0x67, 0x16, 0xd6, 0xd6,
|
||||
+ 0x40, 0xf0, 0x73, 0xd8, 0xac, 0x09, 0xfc, 0x2e, 0xac, 0x7d, 0x7e, 0xfa, 0xbb, 0xd3, 0x57, 0x7f,
|
||||
+ 0x3c, 0xdd, 0xfa, 0x1e, 0x12, 0x67, 0x9f, 0x85, 0x5f, 0x8c, 0x4f, 0x9f, 0x6f, 0x79, 0xfe, 0x3d,
|
||||
+ 0xe8, 0x9e, 0xbe, 0x3a, 0x7f, 0xed, 0x18, 0xad, 0xe0, 0xcf, 0xd0, 0x7b, 0x21, 0x66, 0x51, 0xea,
|
||||
+ 0xc2, 0x84, 0xd5, 0x82, 0xcb, 0x6b, 0x2e, 0xed, 0x61, 0x2c, 0x85, 0x15, 0x26, 0x57, 0x5c, 0xa6,
|
||||
+ 0x78, 0x21, 0x4c, 0x02, 0x0b, 0x1a, 0x65, 0x19, 0x53, 0xea, 0x2b, 0x21, 0x1d, 0x9e, 0x0a, 0x1a,
|
||||
+ 0xef, 0xab, 0xb5, 0x7f, 0xe7, 0x1d, 0xfc, 0x94, 0x54, 0x45, 0xae, 0xef, 0xf2, 0xa5, 0x91, 0xb0,
|
||||
+ 0x60, 0x17, 0xfa, 0x6e, 0xa9, 0xdd, 0xe6, 0x01, 0xac, 0x4a, 0xae, 0xf2, 0xd8, 0xed, 0x62, 0xa9,
|
||||
+ 0xe0, 0x2f, 0xd0, 0x7d, 0x21, 0x58, 0xd1, 0x00, 0x7d, 0xe8, 0x64, 0x4c, 0x5f, 0x59, 0x25, 0xfa,
|
||||
+ 0xc6, 0xa5, 0xb1, 0x60, 0x58, 0x50, 0x6c, 0xc1, 0x34, 0x94, 0xbf, 0x07, 0x6d, 0xc5, 0x33, 0x3a,
|
||||
+ 0x61, 0xf7, 0xe0, 0x83, 0xc5, 0x74, 0x9d, 0xf1, 0x8c, 0x49, 0xa6, 0x85, 0x24, 0xf3, 0xa8, 0x19,
|
||||
+ 0xfc, 0xdd, 0xc3, 0xc4, 0x54, 0xd8, 0xe4, 0x79, 0x96, 0xb9, 0x1a, 0xca, 0xb2, 0x0c, 0x39, 0xd3,
|
||||
+ 0x48, 0xba, 0x7b, 0x39, 0x8d, 0x24, 0xba, 0x74, 0xc1, 0x94, 0x2b, 0xc9, 0xf4, 0x4d, 0xb5, 0x37,
|
||||
+ 0xba, 0xb0, 0xe8, 0xc7, 0x4f, 0xbc, 0x15, 0xea, 0x4d, 0x94, 0x11, 0x3c, 0x08, 0xfa, 0xeb, 0x61,
|
||||
+ 0xc9, 0xc0, 0x20, 0xf3, 0x94, 0x5d, 0xc4, 0x7c, 0x3a, 0x58, 0x25, 0x99, 0x23, 0x83, 0x1d, 0xcc,
|
||||
+ 0x37, 0x9b, 0xde, 0x52, 0xd5, 0xbf, 0x84, 0xee, 0xef, 0x73, 0x75, 0x55, 0x49, 0x42, 0x96, 0xab,
|
||||
+ 0xab, 0xa2, 0xbc, 0x5a, 0xaa, 0x5e, 0x22, 0x5b, 0x0b, 0x25, 0xb2, 0xd2, 0xb9, 0xda, 0xb5, 0xce,
|
||||
+ 0xf5, 0x04, 0x7a, 0xc6, 0xb8, 0xdd, 0x7e, 0x08, 0xeb, 0xd2, 0x7e, 0xbb, 0xc6, 0xe5, 0xe8, 0xe0,
|
||||
+ 0x18, 0x1d, 0x89, 0xe3, 0x9a, 0x23, 0x71, 0x5c, 0x75, 0x04, 0xa9, 0xdb, 0x1d, 0x31, 0x1b, 0xa2,
|
||||
+ 0x91, 0x77, 0xd8, 0xf0, 0x9f, 0x1e, 0x74, 0xcf, 0x58, 0x59, 0x32, 0x10, 0x7f, 0xec, 0xba, 0x6c,
|
||||
+ 0x42, 0x96, 0x42, 0xbe, 0x2d, 0xdc, 0x2d, 0xaa, 0x24, 0x96, 0x2a, 0xc0, 0xd4, 0xae, 0x83, 0xc9,
|
||||
+ 0x06, 0xa2, 0x53, 0x0d, 0x84, 0x03, 0xd3, 0xca, 0x1d, 0x60, 0x22, 0x77, 0x08, 0x4c, 0x5f, 0x57,
|
||||
+ 0xb0, 0x84, 0xdc, 0x02, 0x27, 0x5e, 0x13, 0x27, 0xad, 0x12, 0x27, 0x74, 0x0f, 0xe8, 0xd6, 0xb6,
|
||||
+ 0xdd, 0x3d, 0xa0, 0x3b, 0xeb, 0x43, 0x67, 0xca, 0x95, 0xf3, 0x8a, 0xbe, 0xab, 0xa8, 0x59, 0x69,
|
||||
+ 0xa0, 0xc6, 0x04, 0xe6, 0x46, 0xd4, 0xcc, 0x61, 0xed, 0x25, 0x4f, 0x4e, 0x98, 0x66, 0x18, 0xe2,
|
||||
+ 0x84, 0x27, 0xe7, 0x42, 0xb3, 0x98, 0x34, 0xda, 0x61, 0x41, 0xe3, 0x16, 0x09, 0x4f, 0x9e, 0x49,
|
||||
+ 0x6e, 0x52, 0xd5, 0x0e, 0x1d, 0x49, 0x80, 0xfe, 0x8a, 0x65, 0x66, 0x59, 0x9b, 0x64, 0x25, 0x03,
|
||||
+ 0x6d, 0x22, 0x41, 0x0b, 0x3b, 0xc6, 0xa6, 0xa3, 0x83, 0x6f, 0x3c, 0xda, 0x1b, 0x6b, 0x1f, 0x1e,
|
||||
+ 0x37, 0xe1, 0xc9, 0xd9, 0xdc, 0x54, 0xd5, 0x4e, 0x68, 0x29, 0xdc, 0xf7, 0x8a, 0xb3, 0x0c, 0x05,
|
||||
+ 0x2d, 0x12, 0x38, 0x12, 0xf7, 0xc5, 0xcf, 0xc3, 0x38, 0x16, 0x13, 0xda, 0xb7, 0x13, 0x96, 0x0c,
|
||||
+ 0x27, 0x1d, 0xa7, 0x9f, 0x2b, 0xb3, 0xb1, 0x95, 0x12, 0x03, 0xbd, 0x22, 0x62, 0x1a, 0x9b, 0xe6,
|
||||
+ 0xd8, 0x09, 0x0b, 0x1a, 0x7b, 0x03, 0x7e, 0x87, 0x3c, 0xe6, 0x4c, 0xd9, 0x7b, 0xd8, 0x09, 0x6b,
|
||||
+ 0xbc, 0xe0, 0x35, 0x74, 0xcf, 0xb4, 0x90, 0x6c, 0xc6, 0x29, 0x70, 0x8f, 0x60, 0x53, 0x59, 0x52,
|
||||
+ 0x46, 0x65, 0xd9, 0xab, 0x33, 0xfd, 0x27, 0xb0, 0x65, 0x19, 0x47, 0x6c, 0xf2, 0x26, 0x4a, 0x67,
|
||||
+ 0xcf, 0x94, 0x4d, 0x78, 0x83, 0x1f, 0xfc, 0xc3, 0x83, 0x5e, 0xc8, 0x67, 0x91, 0xd2, 0x72, 0x4e,
|
||||
+ 0x5b, 0x3c, 0x81, 0x2d, 0x69, 0xe8, 0x88, 0xab, 0x33, 0xce, 0xb0, 0x71, 0x9a, 0x76, 0xd8, 0xe0,
|
||||
+ 0xfb, 0x23, 0xf0, 0x4b, 0xde, 0x38, 0x55, 0x7c, 0x92, 0x4b, 0x6e, 0x21, 0xbf, 0x44, 0xe2, 0xef,
|
||||
+ 0xc2, 0xbd, 0x92, 0x7b, 0x14, 0x8b, 0xc9, 0x9b, 0x41, 0x9b, 0x94, 0x17, 0xd9, 0xc1, 0x63, 0xe8,
|
||||
+ 0xd2, 0x4b, 0xa6, 0x6c, 0xcd, 0xd7, 0x5c, 0x5e, 0x08, 0x0b, 0xe6, 0xf5, 0xd0, 0x91, 0xc1, 0x7f,
|
||||
+ 0xdb, 0xd0, 0x33, 0x9a, 0x16, 0x78, 0x4f, 0x09, 0x3f, 0xc8, 0xb2, 0x2f, 0xcc, 0xf7, 0x17, 0xaf,
|
||||
+ 0x8e, 0x45, 0x61, 0xe8, 0xf4, 0x70, 0x06, 0xb0, 0x71, 0xa1, 0x65, 0xad, 0xe5, 0x33, 0x40, 0x25,
|
||||
+ 0x0f, 0x61, 0x55, 0xdf, 0xff, 0x35, 0xf4, 0xac, 0xfb, 0x73, 0x5a, 0x6f, 0xca, 0xff, 0xf6, 0xe2,
|
||||
+ 0xfa, 0x6a, 0x94, 0xc3, 0xda, 0x0a, 0x44, 0xc9, 0x14, 0xb9, 0x42, 0xb8, 0xeb, 0x56, 0xd0, 0x78,
|
||||
+ 0x74, 0x99, 0xa7, 0x24, 0x32, 0xef, 0x17, 0x47, 0xe2, 0x83, 0xee, 0xd5, 0xf1, 0x38, 0xcc, 0x53,
|
||||
+ 0x1c, 0xe2, 0x08, 0x3d, 0x1b, 0x61, 0x85, 0x83, 0x72, 0xda, 0x9c, 0xcb, 0xd3, 0x3c, 0xa1, 0x79,
|
||||
+ 0xa8, 0x1d, 0x56, 0x38, 0x28, 0x9f, 0x89, 0x50, 0xe4, 0x3a, 0x4a, 0xb9, 0xa2, 0xb9, 0xa8, 0x1d,
|
||||
+ 0x56, 0x38, 0x36, 0x92, 0x78, 0x69, 0xec, 0x58, 0xb4, 0x2c, 0x92, 0x28, 0x0e, 0x9d, 0x1e, 0x42,
|
||||
+ 0x9a, 0xbf, 0xcd, 0xb8, 0x8c, 0x12, 0x9e, 0xe2, 0x2d, 0x05, 0x4a, 0x56, 0x8d, 0x17, 0xfc, 0x09,
|
||||
+ 0xbe, 0xff, 0x92, 0xa5, 0xd1, 0x25, 0x57, 0xfa, 0x98, 0x9e, 0x8f, 0x2e, 0xc9, 0x01, 0xf4, 0x12,
|
||||
+ 0x2b, 0xa0, 0x39, 0xc9, 0xbe, 0x95, 0xaa, 0x3c, 0xbc, 0x6d, 0x8e, 0x76, 0xb5, 0xb5, 0x64, 0x04,
|
||||
+ 0x07, 0xf0, 0x60, 0xd1, 0xf4, 0x9d, 0xe3, 0xc2, 0x37, 0x1e, 0xbc, 0xef, 0x16, 0x1d, 0xa6, 0xa9,
|
||||
+ 0xd0, 0xdf, 0xd1, 0x23, 0xac, 0x65, 0x96, 0x76, 0xcf, 0x1e, 0x47, 0x63, 0x09, 0xa5, 0xfb, 0x63,
|
||||
+ 0xcb, 0x3d, 0xdd, 0x99, 0x3e, 0xb4, 0x84, 0xb2, 0x59, 0x6e, 0x09, 0x85, 0x59, 0x10, 0xea, 0x19,
|
||||
+ 0x67, 0x3a, 0x97, 0x5c, 0x0d, 0x56, 0xcc, 0x6c, 0x58, 0x72, 0x08, 0xfa, 0x4c, 0x46, 0x2c, 0xd5,
|
||||
+ 0x36, 0xc5, 0x8e, 0x0c, 0x7e, 0x51, 0x9e, 0x76, 0x9c, 0xaa, 0x8c, 0x4f, 0xf4, 0x77, 0xf0, 0x3b,
|
||||
+ 0xf8, 0x71, 0x79, 0xec, 0x62, 0xb5, 0x0d, 0x16, 0x56, 0x7e, 0xa6, 0x19, 0x2d, 0xeb, 0x85, 0xf4,
|
||||
+ 0x1d, 0xbc, 0x84, 0xf7, 0x9c, 0x7a, 0xb5, 0xf7, 0xbf, 0x4b, 0x84, 0x5c, 0x23, 0x69, 0x95, 0x8d,
|
||||
+ 0x24, 0x18, 0xc1, 0xfd, 0xba, 0xb9, 0xdb, 0x1f, 0x65, 0x07, 0xff, 0xea, 0xc2, 0xda, 0xb1, 0xf9,
|
||||
+ 0xef, 0xe1, 0x9f, 0xc0, 0x0a, 0xcd, 0x82, 0xfe, 0xf6, 0xd2, 0x31, 0xdd, 0xba, 0x36, 0xfc, 0xe0,
|
||||
+ 0x06, 0x69, 0xf9, 0xba, 0xb6, 0x6f, 0xe1, 0x66, 0x6f, 0xad, 0x8e, 0x8f, 0xc3, 0x0f, 0x6f, 0x12,
|
||||
+ 0x1b, 0x43, 0xfb, 0x9e, 0x7f, 0x08, 0x1d, 0x3a, 0xe8, 0xc3, 0xe5, 0xe3, 0x99, 0x31, 0xb3, 0x7d,
|
||||
+ 0xdb, 0xec, 0xe6, 0x1f, 0xc1, 0x9a, 0x9b, 0x75, 0x1e, 0x34, 0x26, 0xe2, 0xcf, 0x92, 0x4c, 0xcf,
|
||||
+ 0x87, 0x1f, 0x2d, 0x1a, 0x58, 0x1c, 0xae, 0x8e, 0xa1, 0x83, 0xb1, 0x6c, 0xba, 0x51, 0x49, 0x58,
|
||||
+ 0xd3, 0x8d, 0x6a, 0xf8, 0xf7, 0x3d, 0x63, 0x24, 0x8e, 0x97, 0x19, 0x29, 0x1e, 0x5a, 0xcb, 0x8c,
|
||||
+ 0x94, 0x0f, 0xa8, 0x7d, 0x0f, 0x63, 0x6b, 0xe6, 0xa0, 0x66, 0x6c, 0x6b, 0x23, 0x57, 0x33, 0xb6,
|
||||
+ 0xf5, 0xf1, 0x69, 0xdf, 0xf3, 0x7f, 0x0b, 0xdd, 0xca, 0x98, 0x73, 0x63, 0x70, 0x3e, 0x7e, 0x87,
|
||||
+ 0xd9, 0x08, 0x81, 0x43, 0x93, 0x46, 0x13, 0x38, 0xd5, 0x01, 0xa7, 0x09, 0x9c, 0xfa, 0x78, 0xf2,
|
||||
+ 0x1c, 0x56, 0xcd, 0x24, 0xe1, 0x2f, 0x53, 0x2c, 0x87, 0x93, 0xe6, 0xe1, 0x16, 0x06, 0x90, 0x63,
|
||||
+ 0xe8, 0xd0, 0x93, 0xbf, 0x09, 0x9b, 0x72, 0xfc, 0x58, 0x02, 0x9b, 0xca, 0xdb, 0xdc, 0x84, 0xda,
|
||||
+ 0xfc, 0x85, 0x69, 0x7a, 0x53, 0xfb, 0x01, 0xd4, 0xf4, 0xa6, 0xfe, 0xf3, 0x66, 0xdf, 0xf3, 0x3f,
|
||||
+ 0x85, 0xf6, 0x39, 0x9b, 0xf9, 0xc3, 0x45, 0xc5, 0xf2, 0x4f, 0xc6, 0xf0, 0x86, 0xf0, 0xe3, 0x51,
|
||||
+ 0xe8, 0xc5, 0xd9, 0x6c, 0x9a, 0xe5, 0x63, 0xb9, 0x79, 0x94, 0xea, 0x83, 0xd1, 0x5c, 0x23, 0xea,
|
||||
+ 0x86, 0x0d, 0x23, 0x95, 0x97, 0x40, 0xd3, 0x48, 0xad, 0xf9, 0xbf, 0x86, 0x7e, 0xbd, 0x01, 0xf8,
|
||||
+ 0x3f, 0x68, 0xf4, 0xac, 0x65, 0xbd, 0x67, 0xf8, 0xc9, 0x5d, 0x6a, 0x76, 0x83, 0x33, 0xd8, 0x5a,
|
||||
+ 0x6c, 0x16, 0xfe, 0xe3, 0x9b, 0xd6, 0x2e, 0xb4, 0x93, 0x1b, 0xa3, 0x77, 0x01, 0xf7, 0x16, 0x4a,
|
||||
+ 0xb1, 0x7f, 0xa3, 0x3f, 0xf5, 0x4a, 0x3f, 0x7c, 0x7c, 0xa7, 0x9e, 0x75, 0xfc, 0x4b, 0xe8, 0x55,
|
||||
+ 0x0b, 0xae, 0xff, 0xf1, 0x4d, 0x0b, 0xab, 0xc5, 0xe2, 0xd1, 0xed, 0x4a, 0x2e, 0x73, 0x17, 0xab,
|
||||
+ 0x74, 0xa0, 0x9f, 0x7c, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x24, 0x62, 0xe9, 0xd4, 0xa8, 0x16, 0x00,
|
||||
+ 0x00,
|
||||
}
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
diff --git a/api/services/control.proto b/api/services/control.proto
index e67b8e93..5eb2b576 100644
--- a/api/services/control.proto
+++ b/api/services/control.proto
@@ -215,6 +215,27 @@ message LogoutResponse {
 message LoadRequest {
     // path is the path of loading file
     string path = 1;
+    // loadID is the unique ID for each time load
+    // also is the part of construct temporary path to
+    // store transport file
+    string loadID = 2;
+    // SeparatorLoad is the info to load separated image
+    SeparatorLoad sep = 3;
+}
+
+message SeparatorLoad {
+    // app is application image name
+    string app = 1;
+    // dir is image tarballs directory
+    string dir = 2;
+    // base is base image tarball path
+    string base = 3;
+    // lib is library image tarball path
+    string lib = 4;
+    // skipCheck is flag to skip sha256 check sum for images
+    bool skipCheck = 5;
+    // enabled is flag to indicate the separator function enabled or not
+    bool enabled = 6;
 }

 message LoadResponse {
@@ -259,6 +280,21 @@ message SaveRequest {
     string path = 3;
     // format is the format of image saved to archive file, such as docker-archive, oci-archive
     string format = 4;
+    // SeparatorSave is the info to save separated image
+    SeparatorSave sep = 5;
+}
+
+message SeparatorSave {
+    // base is base image name
+    string base = 1;
+    // lib is library image name
+    string lib = 2;
+    // rename is rename json file
+    string rename = 3;
+    // dest is destination file directory
+    string dest = 4;
+    // enabled is flag to indicate the separator function enabled or not
+    bool enabled = 5;
 }

 message SaveResponse {
--
2.27.0

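The two messages above are the wire-level contract for separated save/load: SeparatorSave describes how application images are split against a base (and optional lib) image, and SeparatorLoad describes how the pieces are reassembled. The following is only a rough sketch of how a client could populate the regenerated bindings; the pb import path, image names, IDs, format string, and file paths are assumptions for illustration and are not part of this patch.

package main

import (
	"fmt"

	// Assumed import path for the regenerated bindings; adjust to the
	// repository's actual module path.
	pb "isula.org/isula-build/api/services"
)

func main() {
	// Save side: split the listed app images on top of a shared base image.
	saveReq := &pb.SaveRequest{
		Images: []string{"app:latest", "app1:latest"},
		SaveID: "0123456789ab", // placeholder; the CLI generates a non-crypto ID per request
		Format: "docker",       // the CLI forces constant.DockerTransport for separated saves; "docker" assumed here
		Sep: &pb.SeparatorSave{
			Base:    "busybox:latest",   // base image shared by the apps
			Lib:     "lib:latest",       // optional library image
			Rename:  "/tmp/rename.json", // optional rename mapping file
			Dest:    "/tmp/Images",      // directory receiving the split tarballs
			Enabled: true,
		},
	}

	// Load side: reassemble an application image from the split tarballs.
	loadReq := &pb.LoadRequest{
		LoadID: "ba9876543210", // placeholder; also used to build the daemon's temporary path
		Sep: &pb.SeparatorLoad{
			App:       "app:latest",
			Dir:       "/tmp/Images",
			Base:      "/tmp/Images/base.tar.gz", // hypothetical tarball paths
			Lib:       "/tmp/Images/lib.tar.gz",
			SkipCheck: false, // keep sha256 verification enabled
			Enabled:   true,
		},
	}

	fmt.Println(saveReq.GetSep().GetDest(), loadReq.GetSep().GetApp())
}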
patch/0073-cli-finish-client-save-separated-image.patch (new file, 248 lines)
@@ -0,0 +1,248 @@
From 8bb2cb6f3904f13d0010cc207e9b00bafe043805 Mon Sep 17 00:00:00 2001
From: DCCooper <1866858@gmail.com>
Date: Tue, 26 Oct 2021 14:19:10 +0800
Subject: [PATCH 02/16] cli: finish client save separated image

reason: support client-side handling in isula-build for saving a separated image
ABI change (client):
- --dest: destination file directory to store the separated image
- --base: base image name of the separated images
- --lib: lib image name of the separated images
- --rename: rename JSON file path of the separated images

Signed-off-by: DCCooper <1866858@gmail.com>
---
 cmd/cli/save.go | 121 ++++++++++++++++++++++++++++++++++++++----------
 util/common.go  |  24 ++++++++++
 2 files changed, 121 insertions(+), 24 deletions(-)

diff --git a/cmd/cli/save.go b/cmd/cli/save.go
index cb78ecfb..4d22798a 100644
--- a/cmd/cli/save.go
+++ b/cmd/cli/save.go
@@ -29,8 +29,17 @@ import (
     "isula.org/isula-build/util"
 )

+type separatorSaveOption struct {
+    baseImgName  string
+    libImageName string
+    renameFile   string
+    destPath     string
+    enabled      bool
+}
+
 type saveOptions struct {
     images []string
+    sep    separatorSaveOption
     path   string
     saveID string
     format string
@@ -41,7 +50,9 @@ var saveOpts saveOptions
 const (
     saveExample = `isula-build ctr-img save busybox:latest -o busybox.tar
 isula-build ctr-img save 21c3e96ac411 -o myimage.tar
-isula-build ctr-img save busybox:latest alpine:3.9 -o all.tar`
+isula-build ctr-img save busybox:latest alpine:3.9 -o all.tar
+isula-build ctr-img save app:latest app1:latest -d Images
+isula-build ctr-img save app:latest app1:latest -d Images -b busybox:latest -l lib:latest -r rename.json`
 )

 // NewSaveCmd cmd for container image saving
@@ -54,6 +65,10 @@ func NewSaveCmd() *cobra.Command {
     }

     saveCmd.PersistentFlags().StringVarP(&saveOpts.path, "output", "o", "", "Path to save the tarball")
+    saveCmd.PersistentFlags().StringVarP(&saveOpts.sep.destPath, "dest", "d", "Images", "Destination file directory to store separated images")
+    saveCmd.PersistentFlags().StringVarP(&saveOpts.sep.baseImgName, "base", "b", "", "Base image name of separated images")
+    saveCmd.PersistentFlags().StringVarP(&saveOpts.sep.libImageName, "lib", "l", "", "Lib image name of separated images")
+    saveCmd.PersistentFlags().StringVarP(&saveOpts.sep.renameFile, "rename", "r", "", "Rename json file path of separated images")
     if util.CheckCliExperimentalEnabled() {
         saveCmd.PersistentFlags().StringVarP(&saveOpts.format, "format", "f", "oci", "Format of image saving to local tarball")
     } else {
@@ -67,16 +82,7 @@ func saveCommand(cmd *cobra.Command, args []string) error {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
- if len(args) == 0 {
|
||||
- return errors.New("save accepts at least one image")
|
||||
- }
|
||||
- if saveOpts.format == constant.OCITransport && len(args) >= 2 {
|
||||
- return errors.New("oci image format now only supports saving single image")
|
||||
- }
|
||||
- if err := util.CheckImageFormat(saveOpts.format); err != nil {
|
||||
- return err
|
||||
- }
|
||||
- if err := checkSavePath(); err != nil {
|
||||
+ if err := saveOpts.checkSaveOpts(args); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -88,25 +94,79 @@ func saveCommand(cmd *cobra.Command, args []string) error {
|
||||
return runSave(ctx, cli, args)
|
||||
}
|
||||
|
||||
-func checkSavePath() error {
|
||||
- if len(saveOpts.path) == 0 {
|
||||
- return errors.New("output path should not be empty")
|
||||
+func (sep *separatorSaveOption) check(pwd string) error {
|
||||
+ if len(sep.baseImgName) != 0 {
|
||||
+ if !util.IsValidImageName(sep.baseImgName) {
|
||||
+ return errors.Errorf("invalid base image name %s", sep.baseImgName)
|
||||
+ }
|
||||
}
|
||||
+ if len(sep.libImageName) != 0 {
|
||||
+ if !util.IsValidImageName(sep.libImageName) {
|
||||
+ return errors.Errorf("invalid lib image name %s", sep.libImageName)
|
||||
+ }
|
||||
+ }
|
||||
+ if len(sep.destPath) == 0 {
|
||||
+ sep.destPath = "Images"
|
||||
+ }
|
||||
+ if !filepath.IsAbs(sep.destPath) {
|
||||
+ sep.destPath = util.MakeAbsolute(sep.destPath, pwd)
|
||||
+ }
|
||||
+ if util.IsExist(sep.destPath) {
|
||||
+ return errors.Errorf("output file already exist: %q, try to remove existing tarball or rename output file", sep.destPath)
|
||||
+ }
|
||||
+ if len(sep.renameFile) != 0 {
|
||||
+ if !filepath.IsAbs(sep.renameFile) {
|
||||
+ sep.renameFile = util.MakeAbsolute(sep.renameFile, pwd)
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
|
||||
- if strings.Contains(saveOpts.path, ":") {
|
||||
- return errors.Errorf("colon in path %q is not supported", saveOpts.path)
|
||||
+func (opt *saveOptions) checkSaveOpts(args []string) error {
|
||||
+ if len(args) == 0 {
|
||||
+ return errors.New("save accepts at least one image")
|
||||
}
|
||||
|
||||
- if !filepath.IsAbs(saveOpts.path) {
|
||||
- pwd, err := os.Getwd()
|
||||
- if err != nil {
|
||||
- return errors.New("get current path failed")
|
||||
+ if strings.Contains(opt.path, ":") || strings.Contains(opt.sep.destPath, ":") {
|
||||
+ return errors.Errorf("colon in path %q is not supported", opt.path)
|
||||
+ }
|
||||
+ pwd, err := os.Getwd()
|
||||
+ if err != nil {
|
||||
+ return errors.New("get current path failed")
|
||||
+ }
|
||||
+
|
||||
+ // normal save
|
||||
+ if !opt.sep.isEnabled() {
|
||||
+ // only check oci format when doing normal save operation
|
||||
+ if opt.format == constant.OCITransport && len(args) >= 2 {
|
||||
+ return errors.New("oci image format now only supports saving single image")
|
||||
+ }
|
||||
+ if err := util.CheckImageFormat(opt.format); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ if len(opt.path) == 0 {
|
||||
+ return errors.New("output path should not be empty")
|
||||
}
|
||||
- saveOpts.path = util.MakeAbsolute(saveOpts.path, pwd)
|
||||
+ if !filepath.IsAbs(opt.path) {
|
||||
+ opt.path = util.MakeAbsolute(opt.path, pwd)
|
||||
+ }
|
||||
+ if util.IsExist(opt.path) {
|
||||
+ return errors.Errorf("output file already exist: %q, try to remove existing tarball or rename output file", opt.path)
|
||||
+ }
|
||||
+ return nil
|
||||
}
|
||||
|
||||
- if util.IsExist(saveOpts.path) {
|
||||
- return errors.Errorf("output file already exist: %q, try to remove existing tarball or rename output file", saveOpts.path)
|
||||
+ // separator save
|
||||
+ opt.sep.enabled = true
|
||||
+ if len(opt.path) != 0 {
|
||||
+ return errors.New("conflict options between -o and [-b -l -r]")
|
||||
+ }
|
||||
+ // separated images only support the docker image spec
|
||||
+ opt.format = constant.DockerTransport
|
||||
+
|
||||
+ if err := opt.sep.check(pwd); err != nil {
|
||||
+ return err
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -116,11 +176,20 @@ func runSave(ctx context.Context, cli Cli, args []string) error {
|
||||
saveOpts.saveID = util.GenerateNonCryptoID()[:constant.DefaultIDLen]
|
||||
saveOpts.images = args
|
||||
|
||||
+ sep := &pb.SeparatorSave{
|
||||
+ Base: saveOpts.sep.baseImgName,
|
||||
+ Lib: saveOpts.sep.libImageName,
|
||||
+ Rename: saveOpts.sep.renameFile,
|
||||
+ Dest: saveOpts.sep.destPath,
|
||||
+ Enabled: saveOpts.sep.enabled,
|
||||
+ }
|
||||
+
|
||||
saveStream, err := cli.Client().Save(ctx, &pb.SaveRequest{
|
||||
Images: saveOpts.images,
|
||||
Path: saveOpts.path,
|
||||
SaveID: saveOpts.saveID,
|
||||
Format: saveOpts.format,
|
||||
+ Sep: sep,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -137,7 +206,11 @@ func runSave(ctx context.Context, cli Cli, args []string) error {
|
||||
fmt.Printf("Save success with image: %s\n", saveOpts.images)
|
||||
return nil
|
||||
}
|
||||
- return errors.Errorf("save image failed: %v", err)
|
||||
+ return errors.Errorf("save image failed: %v", err.Error())
|
||||
}
|
||||
}
|
||||
}
|
||||
+
|
||||
+func (sep *separatorSaveOption) isEnabled() bool {
|
||||
+ return util.AnyFlagSet(sep.baseImgName, sep.libImageName, sep.renameFile)
|
||||
+}
|
||||
diff --git a/util/common.go b/util/common.go
|
||||
index 00b1b941..4782b2ec 100644
|
||||
--- a/util/common.go
|
||||
+++ b/util/common.go
|
||||
@@ -21,6 +21,7 @@ import (
|
||||
"strings"
|
||||
|
||||
securejoin "github.com/cyphar/filepath-securejoin"
|
||||
+ "github.com/docker/distribution/reference"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
"golang.org/x/sys/unix"
|
||||
@@ -184,3 +185,26 @@ func FormatSize(size, base float64) string {
|
||||
func CheckCliExperimentalEnabled() bool {
|
||||
return os.Getenv("ISULABUILD_CLI_EXPERIMENTAL") == "enabled"
|
||||
}
|
||||
+
|
||||
+// IsValidImageName checks the validity of an image name
|
||||
+func IsValidImageName(name string) bool {
|
||||
+ ref, err := reference.ParseNormalizedNamed(name)
|
||||
+ if err != nil {
|
||||
+ return false
|
||||
+ }
|
||||
+ if _, canonical := ref.(reference.Canonical); canonical {
|
||||
+ return false
|
||||
+ }
|
||||
+ return true
|
||||
+}
|
||||
+
|
||||
+// AnyFlagSet reports whether at least one of the given flags is non-empty.
+// If all flags are empty, it returns false
|
||||
+func AnyFlagSet(flags ...string) bool {
|
||||
+ for _, flag := range flags {
|
||||
+ if len(flag) != 0 {
|
||||
+ return true
|
||||
+ }
|
||||
+ }
|
||||
+ return false
|
||||
+}
|
||||
--
|
||||
2.27.0
|
||||
|
||||
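
The save-side gating above boils down to one rule: a separated save is selected as soon as any of the base, lib, or rename options is set, and it then conflicts with the plain -o output path. A minimal standalone sketch of that rule, with hypothetical option and function names rather than the actual isula-build code:

    package main

    import (
    	"errors"
    	"fmt"
    )

    // anyFlagSet reports whether at least one of the given flags is non-empty,
    // mirroring the util.AnyFlagSet helper added in the patch above.
    func anyFlagSet(flags ...string) bool {
    	for _, f := range flags {
    		if f != "" {
    			return true
    		}
    	}
    	return false
    }

    // checkSave decides between a normal save and a separated save.
    // The parameter names are illustrative only.
    func checkSave(output, base, lib, rename string) (separated bool, err error) {
    	if !anyFlagSet(base, lib, rename) {
    		// normal save: an output path is required
    		if output == "" {
    			return false, errors.New("output path should not be empty")
    		}
    		return false, nil
    	}
    	// separated save: -o must not be combined with the separator flags
    	if output != "" {
    		return false, errors.New("conflicting options between -o and [-b -l -r]")
    	}
    	return true, nil
    }

    func main() {
    	sep, err := checkSave("", "busybox:latest", "", "")
    	fmt.Println(sep, err) // true <nil>
    }
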
995
patch/0074-daemon-finish-daemon-save-separated-image.patch
Normal file
@@ -0,0 +1,995 @@
|
||||
From 8cf5db787c507ce9d4c78191395b25b2f5e0d253 Mon Sep 17 00:00:00 2001
|
||||
From: DCCooper <1866858@gmail.com>
|
||||
Date: Tue, 26 Oct 2021 14:19:27 +0800
|
||||
Subject: [PATCH 03/16] daemon:finish daemon save separated image
|
||||
|
||||
reason: support saving separated images on the isula-build daemon side
|
||||
ABI change(daemon): none
|
||||
Save process changes:
|
||||
1. add an image separation step at the end of the save process (after the tarball is produced); see the sketch below
|
||||
- input: saved tarball
|
||||
- output: separated images
|
||||
|
||||
Signed-off-by: DCCooper <1866858@gmail.com>
|
||||
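
For orientation, the separated save described above leaves a set of per-image tarballs plus a small manifest file in the destination directory; the manifest maps each saved name:tag to its app, lib, and base tarball names, sha256 digests, and layer IDs. A rough sketch of that layout, mirroring the tarballInfo struct added in this patch (the image names and digests below are placeholders, not real output):

    package main

    import (
    	"encoding/json"
    	"fmt"
    )

    // tarball mirrors the JSON layout of the tarballInfo struct added in this patch;
    // the example values below are made up for illustration.
    type tarball struct {
    	AppTarName    string   `json:"app"`
    	AppHash       string   `json:"appHash"`
    	AppLayers     []string `json:"appLayers"`
    	LibTarName    string   `json:"lib"`
    	LibHash       string   `json:"libHash"`
    	LibImageName  string   `json:"libImageName"`
    	LibLayers     []string `json:"libLayers"`
    	BaseTarName   string   `json:"base"`
    	BaseHash      string   `json:"baseHash"`
    	BaseImageName string   `json:"baseImageName"`
    	BaseLayers    []string `json:"baseLayer"`
    }

    func main() {
    	// The "manifest" file in the destination directory maps "name:tag" to its tarball info.
    	manifest := map[string]tarball{
    		"myapp:v1": {
    			AppTarName:    "myapp_v1_app_image.tar.gz",
    			AppHash:       "<sha256 of app tarball>",
    			BaseTarName:   "myapp_v1_base_image.tar.gz",
    			BaseHash:      "<sha256 of base tarball>",
    			BaseImageName: "openeuler:21.09",
    		},
    	}
    	buf, _ := json.MarshalIndent(manifest, "", "  ")
    	fmt.Println(string(buf))
    }
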
---
|
||||
daemon/save.go | 647 ++++++++++++++++++++++++++++++++++++++++++++++++-
|
||||
util/cipher.go | 78 ++++++
|
||||
util/file.go | 153 ++++++++++++
|
||||
3 files changed, 872 insertions(+), 6 deletions(-)
|
||||
create mode 100644 util/file.go
|
||||
|
||||
diff --git a/daemon/save.go b/daemon/save.go
|
||||
index ee706911..ecac5b68 100644
|
||||
--- a/daemon/save.go
|
||||
+++ b/daemon/save.go
|
||||
@@ -15,11 +15,17 @@ package daemon
|
||||
|
||||
import (
|
||||
"context"
|
||||
+ "encoding/json"
|
||||
+ "fmt"
|
||||
+ "io/ioutil"
|
||||
"os"
|
||||
+ "path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/containers/image/v5/docker/reference"
|
||||
"github.com/containers/image/v5/types"
|
||||
+ "github.com/containers/storage/pkg/archive"
|
||||
+ "github.com/docker/docker/pkg/ioutils"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/sirupsen/logrus"
|
||||
"golang.org/x/sync/errgroup"
|
||||
@@ -34,6 +40,22 @@ import (
|
||||
"isula.org/isula-build/util"
|
||||
)
|
||||
|
||||
+const (
|
||||
+ manifestDataFile = "manifest.json"
|
||||
+ manifestFile = "manifest"
|
||||
+ repositoriesFile = "repositories"
|
||||
+ baseTarNameSuffix = "_base_image.tar.gz"
|
||||
+ appTarNameSuffix = "_app_image.tar.gz"
|
||||
+ libTarNameSuffix = "_lib_image.tar.gz"
|
||||
+ untarTempDirName = "untar"
|
||||
+ baseUntarTempDirName = "base_images"
|
||||
+ appUntarTempDirName = "app_images"
|
||||
+ libUntarTempDirName = "lib_images"
|
||||
+ unionTarName = "all.tar"
|
||||
+ layerTarName = "layer.tar"
|
||||
+ tarSuffix = ".tar"
|
||||
+)
|
||||
+
|
||||
type savedImage struct {
|
||||
exist bool
|
||||
tags []reference.NamedTagged
|
||||
@@ -42,18 +64,92 @@ type savedImage struct {
|
||||
type saveOptions struct {
|
||||
sysCtx *types.SystemContext
|
||||
localStore *store.Store
|
||||
+ logger *logger.Logger
|
||||
+ logEntry *logrus.Entry
|
||||
saveID string
|
||||
format string
|
||||
+ outputPath string
|
||||
oriImgList []string
|
||||
finalImageOrdered []string
|
||||
finalImageSet map[string]*savedImage
|
||||
- outputPath string
|
||||
- logger *logger.Logger
|
||||
- logEntry *logrus.Entry
|
||||
+ sep separatorSave
|
||||
}
|
||||
|
||||
-func (b *Backend) getSaveOptions(req *pb.SaveRequest) saveOptions {
|
||||
- return saveOptions{
|
||||
+type separatorSave struct {
|
||||
+ renameData []renames
|
||||
+ tmpDir imageTmpDir
|
||||
+ log *logrus.Entry
|
||||
+ base string
|
||||
+ lib string
|
||||
+ dest string
|
||||
+ enabled bool
|
||||
+}
|
||||
+
|
||||
+type renames struct {
|
||||
+ Name string `json:"name"`
|
||||
+ Rename string `json:"rename"`
|
||||
+}
|
||||
+
|
||||
+type imageTmpDir struct {
|
||||
+ app string
|
||||
+ base string
|
||||
+ lib string
|
||||
+ untar string
|
||||
+ root string
|
||||
+}
|
||||
+
|
||||
+type layer struct {
|
||||
+ all []string
|
||||
+ base []string
|
||||
+ lib []string
|
||||
+ app []string
|
||||
+}
|
||||
+
|
||||
+type imageInfo struct {
|
||||
+ layers layer
|
||||
+ repoTags []string
|
||||
+ config string
|
||||
+ name string
|
||||
+ tag string
|
||||
+ nameTag string
|
||||
+ topLayer string
|
||||
+}
|
||||
+
|
||||
+// imageManifest holds an image's manifest info
|
||||
+type imageManifest struct {
|
||||
+ Config string `json:"Config"`
|
||||
+ RepoTags []string `json:"RepoTags"`
|
||||
+ Layers []string `json:"Layers"`
|
||||
+ // Not shown in the json file
|
||||
+ HashMap map[string]string `json:"-"`
|
||||
+}
|
||||
+
|
||||
+type imageLayersMap map[string]string
|
||||
+
|
||||
+type tarballInfo struct {
|
||||
+ AppTarName string `json:"app"`
|
||||
+ AppHash string `json:"appHash"`
|
||||
+ AppLayers []string `json:"appLayers"`
|
||||
+ LibTarName string `json:"lib"`
|
||||
+ LibHash string `json:"libHash"`
|
||||
+ LibImageName string `json:"libImageName"`
|
||||
+ LibLayers []string `json:"libLayers"`
|
||||
+ BaseTarName string `json:"base"`
|
||||
+ BaseHash string `json:"baseHash"`
|
||||
+ BaseImageName string `json:"baseImageName"`
|
||||
+ BaseLayers []string `json:"baseLayer"`
|
||||
+}
|
||||
+
|
||||
+func (b *Backend) getSaveOptions(req *pb.SaveRequest) (saveOptions, error) {
|
||||
+ var sep = separatorSave{
|
||||
+ base: req.GetSep().GetBase(),
|
||||
+ lib: req.GetSep().GetLib(),
|
||||
+ dest: req.GetSep().GetDest(),
|
||||
+ log: logrus.WithFields(logrus.Fields{"SaveID": req.GetSaveID()}),
|
||||
+ enabled: req.GetSep().GetEnabled(),
|
||||
+ }
|
||||
+
|
||||
+ var opt = saveOptions{
|
||||
sysCtx: image.GetSystemContext(),
|
||||
localStore: b.daemon.localStore,
|
||||
saveID: req.GetSaveID(),
|
||||
@@ -64,7 +160,38 @@ func (b *Backend) getSaveOptions(req *pb.SaveRequest) saveOptions {
|
||||
outputPath: req.GetPath(),
|
||||
logger: logger.NewCliLogger(constant.CliLogBufferLen),
|
||||
logEntry: logrus.WithFields(logrus.Fields{"SaveID": req.GetSaveID(), "Format": req.GetFormat()}),
|
||||
+ sep: sep,
|
||||
+ }
|
||||
+ // normal save
|
||||
+ if !sep.enabled {
|
||||
+ return opt, nil
|
||||
+ }
|
||||
+
|
||||
+ // save separated image
|
||||
+ tmpRoot := filepath.Join(b.daemon.opts.DataRoot, filepath.Join(dataRootTmpDirPrefix, req.GetSaveID()))
|
||||
+ untar := filepath.Join(tmpRoot, untarTempDirName)
|
||||
+ appDir := filepath.Join(tmpRoot, appUntarTempDirName)
|
||||
+ baseDir := filepath.Join(tmpRoot, baseUntarTempDirName)
|
||||
+ libDir := filepath.Join(tmpRoot, libUntarTempDirName)
|
||||
+
|
||||
+ opt.sep.tmpDir = imageTmpDir{
|
||||
+ app: appDir,
|
||||
+ base: baseDir,
|
||||
+ lib: libDir,
|
||||
+ untar: untar,
|
||||
+ root: tmpRoot,
|
||||
}
|
||||
+ opt.outputPath = filepath.Join(untar, unionTarName)
|
||||
+ renameFile := req.GetSep().GetRename()
|
||||
+ if len(renameFile) != 0 {
|
||||
+ var reName []renames
|
||||
+ if err := util.LoadJSONFile(renameFile, &reName); err != nil {
|
||||
+ return saveOptions{}, err
|
||||
+ }
|
||||
+ opt.sep.renameData = reName
|
||||
+ }
|
||||
+
|
||||
+ return opt, nil
|
||||
}
|
||||
|
||||
// Save receives a save request and save the image(s) into tarball
|
||||
@@ -75,7 +202,10 @@ func (b *Backend) Save(req *pb.SaveRequest, stream pb.Control_SaveServer) error
|
||||
}).Info("SaveRequest received")
|
||||
|
||||
var err error
|
||||
- opts := b.getSaveOptions(req)
|
||||
+ opts, err := b.getSaveOptions(req)
|
||||
+ if err != nil {
|
||||
+ return errors.Wrap(err, "process save options failed")
|
||||
+ }
|
||||
|
||||
if err = checkFormat(&opts); err != nil {
|
||||
return err
|
||||
@@ -103,6 +233,11 @@ func (b *Backend) Save(req *pb.SaveRequest, stream pb.Control_SaveServer) error
|
||||
return err
|
||||
}
|
||||
|
||||
+ // separated-image save requested
|
||||
+ if opts.sep.enabled {
|
||||
+ return separateImage(opts)
|
||||
+ }
|
||||
+
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -118,6 +253,9 @@ func exportHandler(ctx context.Context, opts *saveOptions) func() error {
|
||||
}
|
||||
}()
|
||||
|
||||
+ if err := os.MkdirAll(filepath.Dir(opts.outputPath), constant.DefaultRootFileMode); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
for _, imageID := range opts.finalImageOrdered {
|
||||
copyCtx := *opts.sysCtx
|
||||
if opts.format == constant.DockerArchiveTransport {
|
||||
@@ -210,3 +348,500 @@ func filterImageName(opts *saveOptions) error {
|
||||
|
||||
return nil
|
||||
}
|
||||
+
|
||||
+func getLayerHashFromStorage(store *store.Store, name string) ([]string, error) {
|
||||
+ if len(name) == 0 {
|
||||
+ return nil, nil
|
||||
+ }
|
||||
+ _, img, err := image.FindImage(store, name)
|
||||
+ if err != nil {
|
||||
+ return nil, err
|
||||
+ }
|
||||
+
|
||||
+ layer, err := store.Layer(img.TopLayer)
|
||||
+ if err != nil {
|
||||
+ return nil, errors.Wrapf(err, "failed to get top layer for image %s", name)
|
||||
+ }
|
||||
+
|
||||
+ var layers []string
|
||||
+ // walk up the layer chain until the root layer is reached
|
||||
+ for layer != nil {
|
||||
+ fields := strings.Split(layer.UncompressedDigest.String(), ":")
|
||||
+ if len(fields) != 2 {
|
||||
+ return nil, errors.Errorf("error format of layer of image %s", name)
|
||||
+ }
|
||||
+ layers = append(layers, fields[1])
|
||||
+ if layer.Parent == "" {
|
||||
+ break
|
||||
+ }
|
||||
+ layer, err = store.Layer(layer.Parent)
|
||||
+ if err != nil {
|
||||
+ return nil, errors.Wrapf(err, "unable to read layer %q", layer.Parent)
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ return layers, nil
|
||||
+}
|
||||
+
|
||||
+// constructLayerMap resolves each layer.tar symlink to its physical file
+// and maps the physical file name to the <layerID>/layer.tar path
|
||||
+func (s *separatorSave) constructLayerMap() (map[string]string, error) {
|
||||
+ path := s.tmpDir.untar
|
||||
+ files, rErr := ioutil.ReadDir(path)
|
||||
+ if rErr != nil {
|
||||
+ return nil, rErr
|
||||
+ }
|
||||
+
|
||||
+ var layerMap = make(map[string]string, len(files))
|
||||
+ // process layer's file
|
||||
+ for _, file := range files {
|
||||
+ if file.IsDir() {
|
||||
+ layerFile := filepath.Join(path, file.Name(), layerTarName)
|
||||
+ oriFile, err := os.Readlink(layerFile)
|
||||
+ if err != nil {
|
||||
+ return nil, err
|
||||
+ }
|
||||
+ physicFile := filepath.Join(path, file.Name(), oriFile)
|
||||
+ layerMap[filepath.Base(physicFile)] = filepath.Join(file.Name(), layerTarName)
|
||||
+ if err := os.Rename(physicFile, layerFile); err != nil {
|
||||
+ return nil, err
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ return layerMap, nil
|
||||
+}
|
||||
+
|
||||
+func getLayerHashFromTar(layerMap map[string]string, layer []string) map[string]string {
|
||||
+ hashMap := make(map[string]string, len(layer))
|
||||
+ // first reverse the map, since its <k,v> pairs are unique
|
||||
+ revMap := make(map[string]string, len(layerMap))
|
||||
+ for k, v := range layerMap {
|
||||
+ revMap[v] = k
|
||||
+ }
|
||||
+ for _, l := range layer {
|
||||
+ if v, ok := revMap[l]; ok {
|
||||
+ // format is like xxx(hash): xxx/layer.tar
|
||||
+ hashMap[strings.TrimSuffix(v, tarSuffix)] = l
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ return hashMap
|
||||
+}
|
||||
+
|
||||
+func (s *separatorSave) adjustLayers() ([]imageManifest, error) {
|
||||
+ s.log.Info("Adjusting layers for saving separated image")
|
||||
+
|
||||
+ layerMap, err := s.constructLayerMap()
|
||||
+ if err != nil {
|
||||
+ s.log.Errorf("Process layers failed: %v", err)
|
||||
+ return nil, err
|
||||
+ }
|
||||
+
|
||||
+ // process manifest file
|
||||
+ var man []imageManifest
|
||||
+ if lErr := util.LoadJSONFile(filepath.Join(s.tmpDir.untar, manifestDataFile), &man); lErr != nil {
|
||||
+ return nil, lErr
|
||||
+ }
|
||||
+
|
||||
+ for i, img := range man {
|
||||
+ layers := make([]string, len(img.Layers))
|
||||
+ for i, layer := range img.Layers {
|
||||
+ layers[i] = layerMap[layer]
|
||||
+ }
|
||||
+ man[i].Layers = layers
|
||||
+ man[i].HashMap = getLayerHashFromTar(layerMap, layers)
|
||||
+ }
|
||||
+ buf, err := json.Marshal(&man)
|
||||
+ if err != nil {
|
||||
+ return nil, err
|
||||
+ }
|
||||
+ if err := ioutils.AtomicWriteFile(manifestFile, buf, constant.DefaultSharedFileMode); err != nil {
|
||||
+ return nil, err
|
||||
+ }
|
||||
+
|
||||
+ return man, nil
|
||||
+}
|
||||
+
|
||||
+func separateImage(opt saveOptions) error {
|
||||
+ s := &opt.sep
|
||||
+ s.log.Infof("Start saving separated images %v", opt.oriImgList)
|
||||
+ var errList []error
|
||||
+
|
||||
+ if err := os.MkdirAll(s.dest, constant.DefaultRootDirMode); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+
|
||||
+ defer func() {
|
||||
+ if tErr := os.RemoveAll(s.tmpDir.root); tErr != nil && !os.IsNotExist(tErr) {
|
||||
+ s.log.Warnf("Removing save tmp directory %q failed: %v", s.tmpDir.root, tErr)
|
||||
+ }
|
||||
+ if len(errList) != 0 {
|
||||
+ if rErr := os.RemoveAll(s.dest); rErr != nil && !os.IsNotExist(rErr) {
|
||||
+ s.log.Warnf("Removing save dest directory %q failed: %v", s.dest, rErr)
|
||||
+ }
|
||||
+ }
|
||||
+ }()
|
||||
+ if err := util.UnpackFile(opt.outputPath, s.tmpDir.untar, archive.Gzip, true); err != nil {
|
||||
+ errList = append(errList, err)
|
||||
+ return errors.Wrapf(err, "unpack %q failed", opt.outputPath)
|
||||
+ }
|
||||
+ manifest, err := s.adjustLayers()
|
||||
+ if err != nil {
|
||||
+ errList = append(errList, err)
|
||||
+ return errors.Wrap(err, "adjust layers failed")
|
||||
+ }
|
||||
+
|
||||
+ imgInfos, err := s.constructImageInfos(manifest, opt.localStore)
|
||||
+ if err != nil {
|
||||
+ errList = append(errList, err)
|
||||
+ return errors.Wrap(err, "process image infos failed")
|
||||
+ }
|
||||
+
|
||||
+ if err := s.processImageLayers(imgInfos); err != nil {
|
||||
+ errList = append(errList, err)
|
||||
+ return err
|
||||
+ }
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func (s *separatorSave) processImageLayers(imgInfos map[string]imageInfo) error {
|
||||
+ s.log.Info("Processing image layers")
|
||||
+ var (
|
||||
+ tarballs = make(map[string]tarballInfo)
|
||||
+ baseImagesMap = make(imageLayersMap, 1)
|
||||
+ libImagesMap = make(imageLayersMap, 1)
|
||||
+ appImagesMap = make(imageLayersMap, 1)
|
||||
+ )
|
||||
+ for _, info := range imgInfos {
|
||||
+ if err := s.clearDirs(true); err != nil {
|
||||
+ return errors.Wrap(err, "clear tmp dirs failed")
|
||||
+ }
|
||||
+ var t tarballInfo
|
||||
+ // process base
|
||||
+ if err := info.processBaseImg(s, baseImagesMap, &t); err != nil {
|
||||
+ return errors.Wrapf(err, "process base images %s failed", info.nameTag)
|
||||
+ }
|
||||
+ // process lib
|
||||
+ if err := info.processLibImg(s, libImagesMap, &t); err != nil {
|
||||
+ return errors.Wrapf(err, "process lib images %s failed", info.nameTag)
|
||||
+ }
|
||||
+ // process app
|
||||
+ if err := info.processAppImg(s, appImagesMap, &t); err != nil {
|
||||
+ return errors.Wrapf(err, "process app images %s failed", info.nameTag)
|
||||
+ }
|
||||
+ tarballs[info.nameTag] = t
|
||||
+ }
|
||||
+ buf, err := json.Marshal(&tarballs)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ // manifest file
|
||||
+ manifestFile := filepath.Join(s.dest, manifestFile)
|
||||
+ if err := ioutils.AtomicWriteFile(manifestFile, buf, constant.DefaultRootFileMode); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+
|
||||
+ s.log.Info("Save separated image succeed")
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func (s *separatorSave) clearDirs(reCreate bool) error {
|
||||
+ tmpDir := s.tmpDir
|
||||
+ dirs := []string{tmpDir.base, tmpDir.app, tmpDir.lib}
|
||||
+ var mkTmpDirs = func(dirs []string) error {
|
||||
+ for _, dir := range dirs {
|
||||
+ if err := os.MkdirAll(dir, constant.DefaultRootDirMode); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ }
|
||||
+ return nil
|
||||
+ }
|
||||
+
|
||||
+ var rmTmpDirs = func(dirs []string) error {
|
||||
+ for _, dir := range dirs {
|
||||
+ if err := os.RemoveAll(dir); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ }
|
||||
+ return nil
|
||||
+ }
|
||||
+
|
||||
+ if err := rmTmpDirs(dirs); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ if reCreate {
|
||||
+ if err := mkTmpDirs(dirs); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ }
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+// processTarName trims the registry/repository prefix from an image name such as example.io/library/myapp:v1;
+// after processing, the name becomes myapp_v1_suffix
+// note: the suffix must not contain a path separator
|
||||
+func (info imageInfo) processTarName(suffix string) string {
|
||||
+ originNames := strings.Split(info.name, string(os.PathSeparator))
|
||||
+ originTags := strings.Split(info.tag, string(os.PathSeparator))
|
||||
+ // get the last element of the list, which must be the name without the prefix
|
||||
+ name := originNames[len(originNames)-1]
|
||||
+ tag := originTags[len(originTags)-1]
|
||||
+
|
||||
+ return fmt.Sprintf("%s_%s%s", name, tag, suffix)
|
||||
+}
|
||||
+
|
||||
+func (info *imageInfo) processBaseImg(sep *separatorSave, baseImagesMap map[string]string, tarball *tarballInfo) error {
|
||||
+ // process base
|
||||
+ tarball.BaseImageName = sep.base
|
||||
+ for _, layerID := range info.layers.base {
|
||||
+ tarball.BaseLayers = append(tarball.BaseLayers, layerID)
|
||||
+ if baseImg, ok := baseImagesMap[layerID]; !ok {
|
||||
+ srcLayerPath := filepath.Join(sep.tmpDir.untar, layerID)
|
||||
+ destLayerPath := filepath.Join(sep.tmpDir.base, layerID)
|
||||
+ if err := os.Rename(srcLayerPath, destLayerPath); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ baseTarName := info.processTarName(baseTarNameSuffix)
|
||||
+ baseTarName = sep.rename(baseTarName)
|
||||
+ baseTarPath := filepath.Join(sep.dest, baseTarName)
|
||||
+ if err := util.PackFiles(sep.tmpDir.base, baseTarPath, archive.Gzip, true); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ baseImagesMap[layerID] = baseTarPath
|
||||
+ tarball.BaseTarName = baseTarName
|
||||
+ digest, err := util.SHA256Sum(baseTarPath)
|
||||
+ if err != nil {
|
||||
+ return errors.Wrapf(err, "check sum for new base image %s failed", baseTarName)
|
||||
+ }
|
||||
+ tarball.BaseHash = digest
|
||||
+ } else {
|
||||
+ tarball.BaseTarName = filepath.Base(baseImg)
|
||||
+ digest, err := util.SHA256Sum(baseImg)
|
||||
+ if err != nil {
|
||||
+ return errors.Wrapf(err, "check sum for reuse base image %s failed", baseImg)
|
||||
+ }
|
||||
+ tarball.BaseHash = digest
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func (info *imageInfo) processLibImg(sep *separatorSave, libImagesMap map[string]string, tarball *tarballInfo) error {
|
||||
+ // process lib
|
||||
+ if info.layers.lib == nil {
|
||||
+ return nil
|
||||
+ }
|
||||
+
|
||||
+ tarball.LibImageName = sep.lib
|
||||
+ for _, layerID := range info.layers.lib {
|
||||
+ tarball.LibLayers = append(tarball.LibLayers, layerID)
|
||||
+ if libImg, ok := libImagesMap[layerID]; !ok {
|
||||
+ srcLayerPath := filepath.Join(sep.tmpDir.untar, layerID)
|
||||
+ destLayerPath := filepath.Join(sep.tmpDir.lib, layerID)
|
||||
+ if err := os.Rename(srcLayerPath, destLayerPath); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ libTarName := info.processTarName(libTarNameSuffix)
|
||||
+ libTarName = sep.rename(libTarName)
|
||||
+ libTarPath := filepath.Join(sep.dest, libTarName)
|
||||
+ if err := util.PackFiles(sep.tmpDir.lib, libTarPath, archive.Gzip, true); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ libImagesMap[layerID] = libTarPath
|
||||
+ tarball.LibTarName = libTarName
|
||||
+ digest, err := util.SHA256Sum(libTarPath)
|
||||
+ if err != nil {
|
||||
+ return errors.Wrapf(err, "check sum for lib image %s failed", sep.lib)
|
||||
+ }
|
||||
+ tarball.LibHash = digest
|
||||
+ } else {
|
||||
+ tarball.LibTarName = filepath.Base(libImg)
|
||||
+ digest, err := util.SHA256Sum(libImg)
|
||||
+ if err != nil {
|
||||
+ return errors.Wrapf(err, "check sum for lib image %s failed", sep.lib)
|
||||
+ }
|
||||
+ tarball.LibHash = digest
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func (info *imageInfo) processAppImg(sep *separatorSave, appImagesMap map[string]string, tarball *tarballInfo) error {
|
||||
+ // process app
|
||||
+ appTarName := info.processTarName(appTarNameSuffix)
|
||||
+ appTarName = sep.rename(appTarName)
|
||||
+ appTarPath := filepath.Join(sep.dest, appTarName)
|
||||
+ for _, layerID := range info.layers.app {
|
||||
+ srcLayerPath := filepath.Join(sep.tmpDir.untar, layerID)
|
||||
+ destLayerPath := filepath.Join(sep.tmpDir.app, layerID)
|
||||
+ if err := os.Rename(srcLayerPath, destLayerPath); err != nil {
|
||||
+ if appImg, ok := appImagesMap[layerID]; ok {
|
||||
+ return errors.Errorf("lib layers %s already saved in %s for image %s",
|
||||
+ layerID, appImg, info.nameTag)
|
||||
+ }
|
||||
+ }
|
||||
+ appImagesMap[layerID] = appTarPath
|
||||
+ tarball.AppLayers = append(tarball.AppLayers, layerID)
|
||||
+ }
|
||||
+ // create config file
|
||||
+ if err := info.createManifestFile(sep); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ if err := info.createRepositoriesFile(sep); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+
|
||||
+ srcConfigPath := filepath.Join(sep.tmpDir.untar, info.config)
|
||||
+ destConfigPath := filepath.Join(sep.tmpDir.app, info.config)
|
||||
+ if err := os.Rename(srcConfigPath, destConfigPath); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+
|
||||
+ if err := util.PackFiles(sep.tmpDir.app, appTarPath, archive.Gzip, true); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ tarball.AppTarName = appTarName
|
||||
+ digest, err := util.SHA256Sum(appTarPath)
|
||||
+ if err != nil {
|
||||
+ return errors.Wrapf(err, "check sum for app image %s failed", info.nameTag)
|
||||
+ }
|
||||
+ tarball.AppHash = digest
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func (info imageInfo) createRepositoriesFile(sep *separatorSave) error {
|
||||
+ // create repositories
|
||||
+ type repoItem map[string]string
|
||||
+ repo := make(map[string]repoItem, 1)
|
||||
+ item := make(repoItem, 1)
|
||||
+ if _, ok := item[info.tag]; !ok {
|
||||
+ item[info.tag] = info.topLayer
|
||||
+ }
|
||||
+ repo[info.name] = item
|
||||
+ buf, err := json.Marshal(repo)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ repositoryFile := filepath.Join(sep.tmpDir.app, repositoriesFile)
|
||||
+ if err := ioutils.AtomicWriteFile(repositoryFile, buf, constant.DefaultRootFileMode); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func (info imageInfo) createManifestFile(sep *separatorSave) error {
|
||||
+ // create manifest.json
|
||||
+ var s = imageManifest{
|
||||
+ Config: info.config,
|
||||
+ Layers: info.layers.all,
|
||||
+ RepoTags: info.repoTags,
|
||||
+ }
|
||||
+ var m []imageManifest
|
||||
+ m = append(m, s)
|
||||
+ buf, err := json.Marshal(&m)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ data := filepath.Join(sep.tmpDir.app, manifestDataFile)
|
||||
+ if err := ioutils.AtomicWriteFile(data, buf, constant.DefaultRootFileMode); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func getLayersID(layer []string) []string {
|
||||
+ var after = make([]string, len(layer))
|
||||
+ for i, v := range layer {
|
||||
+ after[i] = strings.Split(v, "/")[0]
|
||||
+ }
|
||||
+ return after
|
||||
+}
|
||||
+
|
||||
+func (s *separatorSave) constructSingleImgInfo(mani imageManifest, store *store.Store) (imageInfo, error) {
|
||||
+ var libLayers, appLayers []string
|
||||
+ imageRepoFields := strings.Split(mani.RepoTags[0], ":")
|
||||
+ imageLayers := getLayersID(mani.Layers)
|
||||
+
|
||||
+ libs, bases, err := s.checkLayersHash(mani.HashMap, store)
|
||||
+ if err != nil {
|
||||
+ return imageInfo{}, errors.Wrap(err, "compare layers failed")
|
||||
+ }
|
||||
+ baseLayers := imageLayers[0:len(bases)]
|
||||
+ if len(libs) != 0 {
|
||||
+ libLayers = imageLayers[len(bases):len(libs)]
|
||||
+ appLayers = imageLayers[len(libs):]
|
||||
+ } else {
|
||||
+ libLayers = nil
|
||||
+ appLayers = imageLayers[len(bases):]
|
||||
+ }
|
||||
+
|
||||
+ return imageInfo{
|
||||
+ config: mani.Config,
|
||||
+ repoTags: mani.RepoTags,
|
||||
+ nameTag: mani.RepoTags[0],
|
||||
+ name: strings.Join(imageRepoFields[0:len(imageRepoFields)-1], ":"),
|
||||
+ tag: imageRepoFields[len(imageRepoFields)-1],
|
||||
+ layers: layer{app: appLayers, lib: libLayers, base: baseLayers, all: mani.Layers},
|
||||
+ topLayer: imageLayers[len(imageLayers)-1],
|
||||
+ }, nil
|
||||
+}
|
||||
+
|
||||
+func (s *separatorSave) checkLayersHash(layerHashMap map[string]string, store *store.Store) ([]string, []string, error) {
|
||||
+ libHash, err := getLayerHashFromStorage(store, s.lib)
|
||||
+ if err != nil {
|
||||
+ return nil, nil, errors.Wrapf(err, "get lib image %s layers failed", s.lib)
|
||||
+ }
|
||||
+ baseHash, err := getLayerHashFromStorage(store, s.base)
|
||||
+ if err != nil {
|
||||
+ return nil, nil, errors.Wrapf(err, "get base image %s layers failed", s.base)
|
||||
+ }
|
||||
+ if len(libHash) >= len(layerHashMap) || len(baseHash) >= len(layerHashMap) {
|
||||
+ return nil, nil, errors.Errorf("number of base or lib layers is equal or greater than saved app layers")
|
||||
+ }
|
||||
+
|
||||
+ for _, l := range libHash {
|
||||
+ if _, ok := layerHashMap[l]; !ok {
|
||||
+ return nil, nil, errors.Errorf("dismatch checksum for lib image %s", s.lib)
|
||||
+ }
|
||||
+ }
|
||||
+ for _, b := range baseHash {
|
||||
+ if _, ok := layerHashMap[b]; !ok {
|
||||
+ return nil, nil, errors.Errorf("dismatch checksum for base image %s", s.base)
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ return libHash, baseHash, nil
|
||||
+}
|
||||
+
|
||||
+func (s *separatorSave) constructImageInfos(manifest []imageManifest, store *store.Store) (map[string]imageInfo, error) {
|
||||
+ s.log.Info("Constructing image info")
|
||||
+
|
||||
+ var imgInfos = make(map[string]imageInfo, 1)
|
||||
+ for _, mani := range manifest {
|
||||
+ imgInfo, err := s.constructSingleImgInfo(mani, store)
|
||||
+ if err != nil {
|
||||
+ s.log.Errorf("Constructing image info failed: %v", err)
|
||||
+ return nil, errors.Wrap(err, "construct image info failed")
|
||||
+ }
|
||||
+ if _, ok := imgInfos[imgInfo.nameTag]; !ok {
|
||||
+ imgInfos[imgInfo.nameTag] = imgInfo
|
||||
+ }
|
||||
+ }
|
||||
+ return imgInfos, nil
|
||||
+}
|
||||
+
|
||||
+func (s *separatorSave) rename(name string) string {
|
||||
+ if len(s.renameData) != 0 {
|
||||
+ s.log.Info("Renaming image tarballs")
|
||||
+ for _, item := range s.renameData {
|
||||
+ if item.Name == name {
|
||||
+ return item.Rename
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+ return name
|
||||
+}
|
||||
diff --git a/util/cipher.go b/util/cipher.go
|
||||
index b2aea2a9..d92705c3 100644
|
||||
--- a/util/cipher.go
|
||||
+++ b/util/cipher.go
|
||||
@@ -19,9 +19,11 @@ import (
|
||||
"crypto/cipher"
|
||||
"crypto/rand"
|
||||
"crypto/rsa"
|
||||
+ "crypto/sha256"
|
||||
"crypto/x509"
|
||||
"encoding/hex"
|
||||
"encoding/pem"
|
||||
+ "fmt"
|
||||
"hash"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
@@ -229,3 +231,79 @@ func ReadPublicKey(path string) (rsa.PublicKey, error) {
|
||||
|
||||
return *key, nil
|
||||
}
|
||||
+
|
||||
+func hashFile(path string) (string, error) {
|
||||
+ cleanPath := filepath.Clean(path)
|
||||
+ if len(cleanPath) == 0 {
|
||||
+ return "", errors.New("failed to hash empty path")
|
||||
+ }
|
||||
+ if f, err := os.Stat(cleanPath); err != nil {
|
||||
+ return "", errors.Errorf("failed to stat file %q", cleanPath)
|
||||
+ } else if f.IsDir() {
|
||||
+ return "", errors.New("failed to hash directory")
|
||||
+ }
|
||||
+
|
||||
+ file, err := ioutil.ReadFile(cleanPath) // nolint:gosec
|
||||
+ if err != nil {
|
||||
+ return "", errors.Wrapf(err, "hash file failed")
|
||||
+ }
|
||||
+
|
||||
+ return fmt.Sprintf("%x", sha256.Sum256(file)), nil
|
||||
+}
|
||||
+
|
||||
+func hashDir(path string) (string, error) {
|
||||
+ var checkSum string
|
||||
+ if err := filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
|
||||
+ cleanPath := filepath.Clean(path)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ if !info.Mode().IsRegular() {
|
||||
+ return nil
|
||||
+ }
|
||||
+ if !info.IsDir() {
|
||||
+ f, err := ioutil.ReadFile(cleanPath) // nolint:gosec
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ fileHash := fmt.Sprintf("%x", sha256.Sum256(f))
|
||||
+ checkSum = fmt.Sprintf("%s%s", checkSum, fileHash)
|
||||
+ }
|
||||
+ return nil
|
||||
+ }); err != nil {
|
||||
+ return "", err
|
||||
+ }
|
||||
+
|
||||
+ return fmt.Sprintf("%x", sha256.Sum256([]byte(checkSum))), nil
|
||||
+}
|
||||
+
|
||||
+// SHA256Sum calculates the sha256 checksum for path (a file or a directory).
+// For a directory, the checksum of every regular file is calculated and
+// concatenated to the previous checksums until every file is counted;
+// the concatenated result is then hashed to produce the final checksum
|
||||
+func SHA256Sum(path string) (string, error) {
|
||||
+ path = filepath.Clean(path)
|
||||
+ f, err := os.Stat(path)
|
||||
+ if err != nil {
|
||||
+ return "", err
|
||||
+ }
|
||||
+ if f.IsDir() {
|
||||
+ return hashDir(path)
|
||||
+ }
|
||||
+
|
||||
+ return hashFile(path)
|
||||
+}
|
||||
+
|
||||
+// CheckSum calculates the sha256 checksum of path and compares it with
+// target; if they do not match, an error is returned
|
||||
+func CheckSum(path, target string) error {
|
||||
+ digest, err := SHA256Sum(path)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ if digest != target {
|
||||
+ return errors.Errorf("check sum for path %s failed, got %s, want %s",
|
||||
+ path, digest, target)
|
||||
+ }
|
||||
+ return nil
|
||||
+}
|
||||
diff --git a/util/file.go b/util/file.go
|
||||
new file mode 100644
|
||||
index 00000000..cd4a75d5
|
||||
--- /dev/null
|
||||
+++ b/util/file.go
|
||||
@@ -0,0 +1,153 @@
|
||||
+// Copyright (c) Huawei Technologies Co., Ltd. 2020. All rights reserved.
|
||||
+// isula-build licensed under the Mulan PSL v2.
|
||||
+// You can use this software according to the terms and conditions of the Mulan PSL v2.
|
||||
+// You may obtain a copy of Mulan PSL v2 at:
|
||||
+// http://license.coscl.org.cn/MulanPSL2
|
||||
+// THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
|
||||
+// IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
|
||||
+// PURPOSE.
|
||||
+// See the Mulan PSL v2 for more details.
|
||||
+// Author: Xiang Li
|
||||
+// Create: 2021-08-24
|
||||
+// Description: file manipulation related common functions
|
||||
+
|
||||
+package util
|
||||
+
|
||||
+import (
|
||||
+ "encoding/json"
|
||||
+ "io"
|
||||
+ "io/ioutil"
|
||||
+ "os"
|
||||
+ "path/filepath"
|
||||
+ "time"
|
||||
+
|
||||
+ "github.com/containers/storage/pkg/archive"
|
||||
+ "github.com/pkg/errors"
|
||||
+)
|
||||
+
|
||||
+const (
|
||||
+ fileMaxSize = 10 * 1024 * 1024 // 10MB
|
||||
+)
|
||||
+
|
||||
+// ReadSmallFile reads a small file of less than 10MB
|
||||
+func ReadSmallFile(path string) ([]byte, error) {
|
||||
+ st, err := os.Lstat(path)
|
||||
+ if err != nil {
|
||||
+ return nil, err
|
||||
+ }
|
||||
+ if st.Size() > fileMaxSize {
|
||||
+ return nil, errors.Errorf("file %q too big", path)
|
||||
+ }
|
||||
+ return ioutil.ReadFile(path) // nolint: gosec
|
||||
+}
|
||||
+
|
||||
+// LoadJSONFile loads a JSON file and unmarshals it into v
|
||||
+func LoadJSONFile(file string, v interface{}) error {
|
||||
+ f, err := ReadSmallFile(file)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ return json.Unmarshal(f, v)
|
||||
+}
|
||||
+
|
||||
+// ChangeDirModifyTime recursively changes the modify time of all files under dir
|
||||
+func ChangeDirModifyTime(dir string) error {
|
||||
+ fs, rErr := ioutil.ReadDir(dir)
|
||||
+ if rErr != nil {
|
||||
+ return rErr
|
||||
+ }
|
||||
+ for _, f := range fs {
|
||||
+ src := filepath.Join(dir, f.Name())
|
||||
+ if err := ChangeFileModifyTime(src); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ if f.IsDir() {
|
||||
+ if err := ChangeDirModifyTime(src); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+// ChangeFileModifyTime changes the modify time of a file to the fixed time 2017-01-01 00:00:00
|
||||
+func ChangeFileModifyTime(path string) error {
|
||||
+ mtime := time.Date(2017, time.January, 0, 0, 0, 0, 0, time.UTC)
|
||||
+ atime := time.Date(2017, time.January, 0, 0, 0, 0, 0, time.UTC)
|
||||
+ if _, err := os.Lstat(path); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ if err := os.Chtimes(path, atime, mtime); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+// PackFiles will pack files in "src" directory to "dest" file
|
||||
+// by using different compression method defined by "com"
|
||||
+// the files' modify time attribute will be set to a fix time "2017-01-01 00:00:00"
|
||||
+// if set "modifyTime" to true
|
||||
+func PackFiles(src, dest string, com archive.Compression, modifyTime bool) (err error) {
|
||||
+ if modifyTime {
|
||||
+ if err = ChangeDirModifyTime(src); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ reader, err := archive.Tar(src, com)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+
|
||||
+ f, err := os.Create(dest)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+
|
||||
+ defer func() {
|
||||
+ cErr := f.Close()
|
||||
+ if cErr != nil && err == nil {
|
||||
+ err = cErr
|
||||
+ }
|
||||
+ }()
|
||||
+
|
||||
+ if _, err = io.Copy(f, reader); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+// UnpackFile will unpack "src" file to "dest" directory
|
||||
+// by using different compression method defined by "com"
|
||||
+// The src file will be remove if set "rm" to true
|
||||
+func UnpackFile(src, dest string, com archive.Compression, rm bool) (err error) {
|
||||
+ cleanPath := filepath.Clean(src)
|
||||
+ f, err := os.Open(cleanPath) // nolint:gosec
|
||||
+ if err != nil {
|
||||
+ return errors.Wrapf(err, "unpack: open %q failed", src)
|
||||
+ }
|
||||
+
|
||||
+ defer func() {
|
||||
+ cErr := f.Close()
|
||||
+ if cErr != nil && err == nil {
|
||||
+ err = cErr
|
||||
+ }
|
||||
+ }()
|
||||
+
|
||||
+ if err = archive.Untar(f, dest, &archive.TarOptions{Compression: com}); err != nil {
|
||||
+ return errors.Wrapf(err, "unpack file %q failed", src)
|
||||
+ }
|
||||
+
|
||||
+ if err = ChangeDirModifyTime(dest); err != nil {
|
||||
+ return errors.Wrapf(err, "change modify time for directory %q failed", dest)
|
||||
+ }
|
||||
+
|
||||
+ if rm {
|
||||
+ if err = os.RemoveAll(src); err != nil {
|
||||
+ return errors.Errorf("unpack: remove %q failed: %v ", src, err)
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
--
|
||||
2.27.0
|
||||
|
||||
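
The mtime pinning and checksum helpers above are what make the separated tarballs reproducible: when the content is unchanged, repacking yields an identical archive and therefore a stable sha256 digest. A simplified standalone sketch of that idea; the file name is hypothetical and the exact fixed timestamp should be taken from the helpers above:

    package main

    import (
    	"crypto/sha256"
    	"fmt"
    	"os"
    	"time"
    )

    // fixedTime follows the idea of ChangeFileModifyTime above: pin mtimes so that
    // re-packing identical content yields identical archives and stable digests.
    var fixedTime = time.Date(2017, time.January, 1, 0, 0, 0, 0, time.UTC)

    // pinAndHash normalizes the file's timestamps and returns its sha256 digest.
    func pinAndHash(path string) (string, error) {
    	if err := os.Chtimes(path, fixedTime, fixedTime); err != nil {
    		return "", err
    	}
    	data, err := os.ReadFile(path)
    	if err != nil {
    		return "", err
    	}
    	return fmt.Sprintf("%x", sha256.Sum256(data)), nil
    }

    func main() {
    	digest, err := pinAndHash("layer.tar") // hypothetical file name
    	fmt.Println(digest, err)
    }
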
274
patch/0075-cli-finish-client-load-separated-image.patch
Normal file
@@ -0,0 +1,274 @@
|
||||
From 5749a92be53a3e8a135b4f7e59e8fd6d470fbd55 Mon Sep 17 00:00:00 2001
|
||||
From: DCCooper <1866858@gmail.com>
|
||||
Date: Tue, 26 Oct 2021 14:20:07 +0800
|
||||
Subject: [PATCH 05/16] cli:finish client load separated image
|
||||
|
||||
reason: support isula-build client side option processing for loading separated images
|
||||
ABI change (client), new flags (see the sketch below):
|
||||
- --input: name of the app image when loading separated images
- --dir: path to the separated images' tarball directory
- --base: base image tarball path of the separated images
- --lib: lib image tarball path of the separated images
- --no-check: skip sha256 checksum verification when loading legacy separated images
|
||||
|
||||
Signed-off-by: DCCooper <1866858@gmail.com>
|
||||
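
The flags listed above change the meaning of -i: once any separator flag is given, -i names the app image instead of a tarball path, and -d becomes mandatory. A minimal standalone sketch of that gating, with illustrative names rather than the actual isula-build code:

    package main

    import (
    	"errors"
    	"fmt"
    	"path/filepath"
    )

    // checkLoad mirrors the gating idea of checkLoadOpts in the patch below:
    // any separator flag switches -i from "tarball path" to "app image name".
    // All names here are illustrative, not the isula-build API.
    func checkLoad(input, dir, base, lib string) (separated bool, tarDir string, err error) {
    	if dir == "" && base == "" && lib == "" {
    		// normal load: -i must point at a tarball
    		if input == "" {
    			return false, "", errors.New("tarball path should not be empty")
    		}
    		return false, "", nil
    	}
    	// separated load: -i is the app image name and -d is required
    	if input == "" {
    		return true, "", errors.New("app image should not be empty")
    	}
    	if dir == "" {
    		return true, "", errors.New("image tarball directory should not be empty")
    	}
    	tarDir, err = filepath.Abs(dir)
    	return true, tarDir, err
    }

    func main() {
    	sep, dir, err := checkLoad("app:latest", "/home/Images", "", "")
    	fmt.Println(sep, dir, err) // true /home/Images <nil>
    }
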
---
|
||||
cmd/cli/load.go | 113 ++++++++++++++++++++++++++++++++++++++++---
|
||||
cmd/cli/load_test.go | 50 +++++++++++++++++--
|
||||
cmd/cli/mock.go | 10 ++++
|
||||
3 files changed, 160 insertions(+), 13 deletions(-)
|
||||
|
||||
diff --git a/cmd/cli/load.go b/cmd/cli/load.go
|
||||
index 16e90a26..2a9df772 100644
|
||||
--- a/cmd/cli/load.go
|
||||
+++ b/cmd/cli/load.go
|
||||
@@ -25,18 +25,32 @@ import (
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
+ constant "isula.org/isula-build"
|
||||
pb "isula.org/isula-build/api/services"
|
||||
"isula.org/isula-build/util"
|
||||
)
|
||||
|
||||
+type separatorLoadOption struct {
|
||||
+ app string
|
||||
+ base string
|
||||
+ lib string
|
||||
+ dir string
|
||||
+ skipCheck bool
|
||||
+ enabled bool
|
||||
+}
|
||||
+
|
||||
type loadOptions struct {
|
||||
- path string
|
||||
+ path string
|
||||
+ loadID string
|
||||
+ sep separatorLoadOption
|
||||
}
|
||||
|
||||
var loadOpts loadOptions
|
||||
|
||||
const (
|
||||
- loadExample = `isula-build ctr-img load -i busybox.tar`
|
||||
+ loadExample = `isula-build ctr-img load -i busybox.tar
|
||||
+isula-build ctr-img load -i app:latest -d /home/Images
|
||||
+isula-build ctr-img load -i app:latest -d /home/Images -b /home/Images/base.tar.gz -l /home/Images/lib.tar.gz`
|
||||
)
|
||||
|
||||
// NewLoadCmd returns image load command
|
||||
@@ -49,12 +63,20 @@ func NewLoadCmd() *cobra.Command {
|
||||
RunE: loadCommand,
|
||||
}
|
||||
|
||||
- loadCmd.PersistentFlags().StringVarP(&loadOpts.path, "input", "i", "", "Path to local tarball")
|
||||
+ loadCmd.PersistentFlags().StringVarP(&loadOpts.path, "input", "i", "", "Path to local tarball(or app image name when load separated images)")
|
||||
+ loadCmd.PersistentFlags().StringVarP(&loadOpts.sep.dir, "dir", "d", "", "Path to separated image tarballs directory")
|
||||
+ loadCmd.PersistentFlags().StringVarP(&loadOpts.sep.base, "base", "b", "", "Base image tarball path of separated images")
|
||||
+ loadCmd.PersistentFlags().StringVarP(&loadOpts.sep.lib, "lib", "l", "", "Library image tarball path of separated images")
|
||||
+ loadCmd.PersistentFlags().BoolVarP(&loadOpts.sep.skipCheck, "no-check", "", false, "Skip sha256 check sum for legacy separated images loading")
|
||||
|
||||
return loadCmd
|
||||
}
|
||||
|
||||
func loadCommand(cmd *cobra.Command, args []string) error {
|
||||
+ if err := loadOpts.checkLoadOpts(); err != nil {
|
||||
+ return errors.Wrapf(err, "check load options failed")
|
||||
+ }
|
||||
+
|
||||
ctx := context.Background()
|
||||
cli, err := NewClient(ctx)
|
||||
if err != nil {
|
||||
@@ -65,14 +87,20 @@ func loadCommand(cmd *cobra.Command, args []string) error {
|
||||
}
|
||||
|
||||
func runLoad(ctx context.Context, cli Cli) error {
|
||||
- var err error
|
||||
-
|
||||
- if loadOpts.path, err = resolveLoadPath(loadOpts.path); err != nil {
|
||||
- return err
|
||||
+ loadOpts.loadID = util.GenerateNonCryptoID()[:constant.DefaultIDLen]
|
||||
+ sep := &pb.SeparatorLoad{
|
||||
+ App: loadOpts.sep.app,
|
||||
+ Dir: loadOpts.sep.dir,
|
||||
+ Base: loadOpts.sep.base,
|
||||
+ Lib: loadOpts.sep.lib,
|
||||
+ SkipCheck: loadOpts.sep.skipCheck,
|
||||
+ Enabled: loadOpts.sep.enabled,
|
||||
}
|
||||
|
||||
resp, err := cli.Client().Load(ctx, &pb.LoadRequest{
|
||||
- Path: loadOpts.path,
|
||||
+ Path: loadOpts.path,
|
||||
+ LoadID: loadOpts.loadID,
|
||||
+ Sep: sep,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -114,3 +142,72 @@ func resolveLoadPath(path string) (string, error) {
|
||||
|
||||
return path, nil
|
||||
}
|
||||
+
|
||||
+func (opt *loadOptions) checkLoadOpts() error {
|
||||
+ // normal load
|
||||
+ if !opt.sep.isEnabled() {
|
||||
+ path, err := resolveLoadPath(opt.path)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ opt.path = path
|
||||
+
|
||||
+ return nil
|
||||
+ }
|
||||
+
|
||||
+ // load separated image
|
||||
+ opt.sep.enabled = true
|
||||
+ if len(opt.path) == 0 {
|
||||
+ return errors.New("app image should not be empty")
|
||||
+ }
|
||||
+
|
||||
+ // Use opt.path as app image name when operating separated images
|
||||
+ // this can be treated as the switch for handling separated images
|
||||
+ opt.sep.app = opt.path
|
||||
+
|
||||
+ if err := opt.sep.check(); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func (sep *separatorLoadOption) isEnabled() bool {
|
||||
+ return util.AnyFlagSet(sep.dir, sep.base, sep.lib, sep.app)
|
||||
+}
|
||||
+
|
||||
+func (sep *separatorLoadOption) check() error {
|
||||
+ pwd, err := os.Getwd()
|
||||
+ if err != nil {
|
||||
+ return errors.New("get current path failed")
|
||||
+ }
|
||||
+ if !util.IsValidImageName(sep.app) {
|
||||
+ return errors.Errorf("invalid image name: %s", sep.app)
|
||||
+ }
|
||||
+
|
||||
+ if len(sep.base) != 0 {
|
||||
+ path, err := resolveLoadPath(sep.base)
|
||||
+ if err != nil {
|
||||
+ return errors.Wrap(err, "resolve base tarball path failed")
|
||||
+ }
|
||||
+ sep.base = path
|
||||
+ }
|
||||
+ if len(sep.lib) != 0 {
|
||||
+ path, err := resolveLoadPath(sep.lib)
|
||||
+ if err != nil {
|
||||
+ return errors.Wrap(err, "resolve lib tarball path failed")
|
||||
+ }
|
||||
+ sep.lib = path
|
||||
+ }
|
||||
+ if len(sep.dir) == 0 {
|
||||
+ return errors.New("image tarball directory should not be empty")
|
||||
+ }
|
||||
+ if !filepath.IsAbs(sep.dir) {
|
||||
+ sep.dir = util.MakeAbsolute(sep.dir, pwd)
|
||||
+ }
|
||||
+ if !util.IsExist(sep.dir) {
|
||||
+ return errors.Errorf("image tarball directory %s is not exist", sep.dir)
|
||||
+ }
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
diff --git a/cmd/cli/load_test.go b/cmd/cli/load_test.go
|
||||
index 9c753e23..b7bf2a57 100644
|
||||
--- a/cmd/cli/load_test.go
|
||||
+++ b/cmd/cli/load_test.go
|
||||
@@ -15,19 +15,59 @@ package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
+ "io/ioutil"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"gotest.tools/v3/assert"
|
||||
"gotest.tools/v3/fs"
|
||||
+ constant "isula.org/isula-build"
|
||||
)
|
||||
|
||||
func TestLoadCmd(t *testing.T) {
|
||||
- cmd := NewLoadCmd()
|
||||
- err := cmd.Execute()
|
||||
- assert.Equal(t, err != nil, true)
|
||||
- err = loadCommand(cmd, nil)
|
||||
- assert.ErrorContains(t, err, "isula_build")
|
||||
+ tmpDir := fs.NewFile(t, t.Name())
|
||||
+ err := ioutil.WriteFile(tmpDir.Path(), []byte("This is test file"), constant.DefaultSharedFileMode)
|
||||
+ assert.NilError(t, err)
|
||||
+ defer tmpDir.Remove()
|
||||
+
|
||||
+ type testcase struct {
|
||||
+ name string
|
||||
+ path string
|
||||
+ errString string
|
||||
+ args []string
|
||||
+ wantErr bool
|
||||
+ sep separatorLoadOption
|
||||
+ }
|
||||
+ // For normal cases, default err is "invalid socket path: unix:///var/run/isula_build.sock".
|
||||
+ // As the daemon is not running while the unit tests run.
|
||||
+ var testcases = []testcase{
|
||||
+ {
|
||||
+ name: "TC1 - normal case",
|
||||
+ path: tmpDir.Path(),
|
||||
+ errString: "isula_build.sock",
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ }
|
||||
+
|
||||
+ for _, tc := range testcases {
|
||||
+ t.Run(tc.name, func(t *testing.T) {
|
||||
+ loadCmd := NewLoadCmd()
|
||||
+ loadOpts = loadOptions{
|
||||
+ path: tc.path,
|
||||
+ sep: tc.sep,
|
||||
+ }
|
||||
+ err := loadCmd.Execute()
|
||||
+ assert.Equal(t, err != nil, true)
|
||||
+
|
||||
+ err = loadCommand(loadCmd, tc.args)
|
||||
+ if tc.wantErr {
|
||||
+ assert.ErrorContains(t, err, tc.errString)
|
||||
+ }
|
||||
+ if !tc.wantErr {
|
||||
+ assert.NilError(t, err)
|
||||
+ }
|
||||
+ })
|
||||
+ }
|
||||
}
|
||||
|
||||
func TestRunLoad(t *testing.T) {
|
||||
diff --git a/cmd/cli/mock.go b/cmd/cli/mock.go
|
||||
index 2ae07d56..142c87fa 100644
|
||||
--- a/cmd/cli/mock.go
|
||||
+++ b/cmd/cli/mock.go
|
||||
@@ -318,6 +318,16 @@ func (f *mockDaemon) importImage(_ context.Context, opts ...grpc.CallOption) (pb
|
||||
|
||||
func (f *mockDaemon) load(_ context.Context, in *pb.LoadRequest, opts ...grpc.CallOption) (pb.Control_LoadClient, error) {
|
||||
f.loadReq = in
|
||||
+ path := f.loadReq.Path
|
||||
+ sep := f.loadReq.Sep
|
||||
+ if !sep.Enabled {
|
||||
+ if path == "" {
|
||||
+ return &mockLoadClient{}, errors.Errorf("tarball path should not be empty")
|
||||
+ }
|
||||
+ _, err := resolveLoadPath(path)
|
||||
+ return &mockLoadClient{}, err
|
||||
+ }
|
||||
+
|
||||
return &mockLoadClient{}, nil
|
||||
}
|
||||
|
||||
--
|
||||
2.27.0
|
||||
|
||||
389
patch/0076-daemon-finish-daemon-load-separated-image.patch
Normal file
@@ -0,0 +1,389 @@
|
||||
From 6545a2222419954045cf4b80cc9f03f918e568af Mon Sep 17 00:00:00 2001
|
||||
From: DCCooper <1866858@gmail.com>
|
||||
Date: Tue, 26 Oct 2021 14:21:02 +0800
|
||||
Subject: [PATCH] daemon:finish daemon load separated image
|
||||
|
||||
reason: support loading separated images on the isula-build daemon side
|
||||
ABI change(daemon): none
|
||||
Load process changes:
|
||||
1. reconstruct a single image tarball at the beginning of the load process (see the sketch below)
|
||||
- input: separated images
|
||||
- output: none
|
||||
- addition: new images in the local store
|
||||
|
||||
Signed-off-by: DCCooper <1866858@gmail.com>
|
||||
Signed-off-by: lixiang <lixiang172@huawei.com>
|
||||
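
A rough standalone sketch of the first steps of the separated load described above: read the manifest written at save time, look up the requested app image, and verify the recorded tarball digests unless --no-check was given. The file names and the plain path join are illustrative; the real code below uses securejoin and the daemon's temporary directories.

    package main

    import (
    	"crypto/sha256"
    	"encoding/json"
    	"fmt"
    	"os"
    )

    // entry mirrors the fields of tarballInfo that the load path needs;
    // the JSON tags match the manifest written by the save patch above.
    type entry struct {
    	AppTarName  string `json:"app"`
    	AppHash     string `json:"appHash"`
    	BaseTarName string `json:"base"`
    	BaseHash    string `json:"baseHash"`
    	LibTarName  string `json:"lib"`
    	LibHash     string `json:"libHash"`
    }

    // lookupApp loads the "manifest" file from dir and returns the record for app,
    // the first step of loadSeparatedImage below. Paths here are illustrative.
    func lookupApp(dir, app string) (entry, error) {
    	data, err := os.ReadFile(dir + "/manifest")
    	if err != nil {
    		return entry{}, err
    	}
    	m := make(map[string]entry)
    	if err := json.Unmarshal(data, &m); err != nil {
    		return entry{}, err
    	}
    	e, ok := m[app]
    	if !ok {
    		return entry{}, fmt.Errorf("failed to find app image %s", app)
    	}
    	return e, nil
    }

    // verify recomputes a file's sha256 and compares it with the digest recorded
    // in the manifest; this is the check that --no-check skips.
    func verify(path, want string) error {
    	data, err := os.ReadFile(path)
    	if err != nil {
    		return err
    	}
    	if got := fmt.Sprintf("%x", sha256.Sum256(data)); got != want {
    		return fmt.Errorf("checksum mismatch for %s: got %s, want %s", path, got, want)
    	}
    	return nil
    }

    func main() {
    	e, err := lookupApp("/home/Images", "app:latest")
    	if err != nil {
    		fmt.Println(err)
    		return
    	}
    	fmt.Println(verify("/home/Images/"+e.AppTarName, e.AppHash))
    }
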
---
|
||||
daemon/load.go | 306 +++++++++++++++++++++++++++++++++++++++++++++++--
|
||||
1 file changed, 294 insertions(+), 12 deletions(-)
|
||||
|
||||
diff --git a/daemon/load.go b/daemon/load.go
|
||||
index 2fb8e27d..41690abc 100644
|
||||
--- a/daemon/load.go
|
||||
+++ b/daemon/load.go
|
||||
@@ -14,11 +14,16 @@
|
||||
package daemon
|
||||
|
||||
import (
|
||||
+ "io/ioutil"
|
||||
+ "os"
|
||||
+ "path/filepath"
|
||||
+
|
||||
"github.com/containers/image/v5/docker/tarfile"
|
||||
ociarchive "github.com/containers/image/v5/oci/archive"
|
||||
"github.com/containers/image/v5/transports/alltransports"
|
||||
"github.com/containers/image/v5/types"
|
||||
"github.com/containers/storage"
|
||||
+ "github.com/containers/storage/pkg/archive"
|
||||
securejoin "github.com/cyphar/filepath-securejoin"
|
||||
imgspecv1 "github.com/opencontainers/image-spec/specs-go/v1"
|
||||
"github.com/pkg/errors"
|
||||
@@ -33,30 +38,108 @@ import (
|
||||
"isula.org/isula-build/util"
|
||||
)
|
||||
|
||||
+const (
|
||||
+ tmpBaseDirName = "base"
|
||||
+ tmpAppDirName = "app"
|
||||
+ tmpLibDirName = "lib"
|
||||
+ unionCompressedTarName = "all.tar.gz"
|
||||
+)
|
||||
+
|
||||
+type loadImageTmpDir struct {
|
||||
+ app string
|
||||
+ base string
|
||||
+ lib string
|
||||
+ root string
|
||||
+}
|
||||
+
|
||||
+type separatorLoad struct {
|
||||
+ log *logrus.Entry
|
||||
+ tmpDir loadImageTmpDir
|
||||
+ info tarballInfo
|
||||
+ appName string
|
||||
+ basePath string
|
||||
+ appPath string
|
||||
+ libPath string
|
||||
+ dir string
|
||||
+ skipCheck bool
|
||||
+ enabled bool
|
||||
+}
|
||||
+
|
||||
type loadOptions struct {
|
||||
- path string
|
||||
- format string
|
||||
+ path string
|
||||
+ format string
|
||||
+ logEntry *logrus.Entry
|
||||
+ sep separatorLoad
|
||||
}
|
||||
|
||||
-func (b *Backend) getLoadOptions(req *pb.LoadRequest) loadOptions {
|
||||
- return loadOptions{
|
||||
+func (b *Backend) getLoadOptions(req *pb.LoadRequest) (loadOptions, error) {
|
||||
+ var opt = loadOptions{
|
||||
path: req.GetPath(),
|
||||
+ sep: separatorLoad{
|
||||
+ appName: req.GetSep().GetApp(),
|
||||
+ basePath: req.GetSep().GetBase(),
|
||||
+ libPath: req.GetSep().GetLib(),
|
||||
+ dir: req.GetSep().GetDir(),
|
||||
+ skipCheck: req.GetSep().GetSkipCheck(),
|
||||
+ enabled: req.GetSep().GetEnabled(),
|
||||
+ },
|
||||
+ logEntry: logrus.WithFields(logrus.Fields{"LoadID": req.GetLoadID()}),
|
||||
}
|
||||
+
|
||||
+ // normal loadOptions
|
||||
+ if !opt.sep.enabled {
|
||||
+ if err := util.CheckLoadFile(opt.path); err != nil {
|
||||
+ return loadOptions{}, err
|
||||
+ }
|
||||
+ return opt, nil
|
||||
+ }
|
||||
+
|
||||
+ // load separated images
|
||||
+ // log is used for sep methods
|
||||
+ opt.sep.log = opt.logEntry
|
||||
+ tmpRoot := filepath.Join(b.daemon.opts.DataRoot, filepath.Join(dataRootTmpDirPrefix, req.GetLoadID()))
|
||||
+ opt.sep.tmpDir.root = tmpRoot
|
||||
+ opt.sep.tmpDir.base = filepath.Join(tmpRoot, tmpBaseDirName)
|
||||
+ opt.sep.tmpDir.app = filepath.Join(tmpRoot, tmpAppDirName)
|
||||
+ opt.sep.tmpDir.lib = filepath.Join(tmpRoot, tmpLibDirName)
|
||||
+
|
||||
+ // check image name and add "latest" tag if not present
|
||||
+ _, appImgName, err := image.GetNamedTaggedReference(opt.sep.appName)
|
||||
+ if err != nil {
|
||||
+ return loadOptions{}, err
|
||||
+ }
|
||||
+ opt.sep.appName = appImgName
|
||||
+
|
||||
+ return opt, nil
|
||||
}
|
||||
|
||||
// Load loads the image
|
||||
func (b *Backend) Load(req *pb.LoadRequest, stream pb.Control_LoadServer) error {
|
||||
- logrus.Info("LoadRequest received")
|
||||
+ logrus.WithFields(logrus.Fields{
|
||||
+ "LoadID": req.GetLoadID(),
|
||||
+ }).Info("LoadRequest received")
|
||||
|
||||
var (
|
||||
si *storage.Image
|
||||
repoTags [][]string
|
||||
- err error
|
||||
)
|
||||
- opts := b.getLoadOptions(req)
|
||||
+ opts, err := b.getLoadOptions(req)
|
||||
+ if err != nil {
|
||||
+ return errors.Wrap(err, "process load options failed")
|
||||
+ }
|
||||
+
|
||||
+ defer func() {
|
||||
+ if tErr := os.RemoveAll(opts.sep.tmpDir.root); tErr != nil {
|
||||
+ opts.logEntry.Warnf("Removing load tmp directory %q failed: %v", opts.sep.tmpDir.root, tErr)
|
||||
+ }
|
||||
+ }()
|
||||
|
||||
- if cErr := util.CheckLoadFile(req.Path); cErr != nil {
|
||||
- return cErr
|
||||
+ // construct separated images
|
||||
+ if opts.sep.enabled {
|
||||
+ if lErr := loadSeparatedImage(&opts); lErr != nil {
|
||||
+ opts.logEntry.Errorf("Load separated image for %s failed: %v", opts.sep.appName, lErr)
|
||||
+ return lErr
|
||||
+ }
|
||||
}
|
||||
|
||||
repoTags, err = tryToParseImageFormatFromTarball(b.daemon.opts.DataRoot, &opts)
|
||||
@@ -149,8 +232,13 @@ func getDockerRepoTagFromImageTar(systemContext *types.SystemContext, path strin
|
||||
// tmp dir will be removed after NewSourceFromFileWithContext
|
||||
tarfileSource, err := tarfile.NewSourceFromFileWithContext(systemContext, path)
|
||||
if err != nil {
|
||||
- return nil, errors.Wrapf(err, "failed to get the source of loading tar file")
|
||||
+ return nil, errors.Wrap(err, "failed to get the source of loading tar file")
|
||||
}
|
||||
+ defer func() {
|
||||
+ if cErr := tarfileSource.Close(); cErr != nil {
|
||||
+ logrus.Warnf("tar file source close failed: %v", cErr)
|
||||
+ }
|
||||
+ }()
|
||||
|
||||
topLevelImageManifest, err := tarfileSource.LoadTarManifest()
|
||||
if err != nil || len(topLevelImageManifest) == 0 {
|
||||
@@ -172,12 +260,12 @@ func getOCIRepoTagFromImageTar(systemContext *types.SystemContext, path string)
|
||||
|
||||
srcRef, err := alltransports.ParseImageName(exporter.FormatTransport(constant.OCIArchiveTransport, path))
|
||||
if err != nil {
|
||||
- return nil, errors.Wrapf(err, "failed to parse image name of oci image format")
|
||||
+ return nil, errors.Wrap(err, "failed to parse image name of oci image format")
|
||||
}
|
||||
|
||||
tarManifest, err := ociarchive.LoadManifestDescriptorWithContext(systemContext, srcRef)
|
||||
if err != nil {
|
||||
- return nil, errors.Wrapf(err, "failed to load manifest descriptor of oci image format")
|
||||
+ return nil, errors.Wrap(err, "failed to load manifest descriptor of oci image format")
|
||||
}
|
||||
|
||||
// For now, we only support load single image in archive file
|
||||
@@ -187,3 +275,197 @@ func getOCIRepoTagFromImageTar(systemContext *types.SystemContext, path string)
|
||||
|
||||
return [][]string{{}}, nil
|
||||
}
|
||||
+
|
||||
+func loadSeparatedImage(opt *loadOptions) error {
|
||||
+ s := &opt.sep
|
||||
+ s.log.Infof("Starting load separated image %s", s.appName)
|
||||
+
|
||||
+ // load manifest file to get tarball info
|
||||
+ if err := s.getTarballInfo(); err != nil {
|
||||
+ return errors.Wrap(err, "failed to get tarball info")
|
||||
+ }
|
||||
+ if err := s.constructTarballInfo(); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ // checksum for image tarballs
|
||||
+ if err := s.tarballCheckSum(); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ // process image tarballs and get final constructed image tarball
|
||||
+ tarPath, err := s.processTarballs()
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ opt.path = tarPath
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func (s *separatorLoad) getTarballInfo() error {
|
||||
+ manifest, err := securejoin.SecureJoin(s.dir, manifestFile)
|
||||
+ if err != nil {
|
||||
+ return errors.Wrap(err, "join manifest file path failed")
|
||||
+ }
|
||||
+
|
||||
+ var t = make(map[string]tarballInfo)
|
||||
+ if err = util.LoadJSONFile(manifest, &t); err != nil {
|
||||
+ return errors.Wrap(err, "load manifest file failed")
|
||||
+ }
|
||||
+
|
||||
+ tarball, ok := t[s.appName]
|
||||
+ if !ok {
|
||||
+ return errors.Errorf("failed to find app image %s", s.appName)
|
||||
+ }
|
||||
+ s.info = tarball
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func (s *separatorLoad) constructTarballInfo() (err error) {
|
||||
+ s.log.Infof("construct image tarball info for %s", s.appName)
|
||||
+ // fill in tarball paths for the separator
+ // this case should not happen since the client side already checks this flag
|
||||
+ if len(s.appName) == 0 {
|
||||
+ return errors.New("app image name should not be empty")
|
||||
+ }
|
||||
+ s.appPath, err = securejoin.SecureJoin(s.dir, s.info.AppTarName)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+
|
||||
+ if len(s.basePath) == 0 {
|
||||
+ if len(s.info.BaseTarName) == 0 {
|
||||
+ return errors.Errorf("base image %s tarball can not be empty", s.info.BaseImageName)
|
||||
+ }
|
||||
+ s.log.Info("Base image path is empty, use path from manifest")
|
||||
+ s.basePath, err = securejoin.SecureJoin(s.dir, s.info.BaseTarName)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ }
|
||||
+ if len(s.libPath) == 0 && len(s.info.LibTarName) != 0 {
|
||||
+ s.log.Info("Lib image path is empty, use path from manifest")
|
||||
+ s.libPath, err = securejoin.SecureJoin(s.dir, s.info.LibTarName)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func (s *separatorLoad) tarballCheckSum() error {
|
||||
+ if s.skipCheck {
|
||||
+ s.log.Info("Skip checksum for tarballs")
|
||||
+ return nil
|
||||
+ }
|
||||
+
|
||||
+ // app image tarball can not be empty
|
||||
+ if len(s.appPath) == 0 {
|
||||
+ return errors.New("app image tarball path can not be empty")
|
||||
+ }
|
||||
+ if err := util.CheckSum(s.appPath, s.info.AppHash); err != nil {
|
||||
+ return errors.Wrapf(err, "check sum for file %q failed", s.appPath)
|
||||
+ }
|
||||
+
|
||||
+ // base image tarball can not be empty
|
||||
+ if len(s.basePath) == 0 {
|
||||
+ return errors.New("base image tarball path can not be empty")
|
||||
+ }
|
||||
+ if err := util.CheckSum(s.basePath, s.info.BaseHash); err != nil {
|
||||
+ return errors.Wrapf(err, "check sum for file %q failed", s.basePath)
|
||||
+ }
|
||||
+
|
||||
+ // lib image may be empty image
|
||||
+ if len(s.libPath) != 0 {
|
||||
+ if err := util.CheckSum(s.libPath, s.info.LibHash); err != nil {
|
||||
+ return errors.Wrapf(err, "check sum for file %q failed", s.libPath)
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func (s *separatorLoad) processTarballs() (string, error) {
|
||||
+ if err := s.unpackTarballs(); err != nil {
|
||||
+ return "", err
|
||||
+ }
|
||||
+
|
||||
+ if err := s.reconstructImage(); err != nil {
|
||||
+ return "", err
|
||||
+ }
|
||||
+
|
||||
+ // pack app image to tarball
|
||||
+ tarPath := filepath.Join(s.tmpDir.root, unionCompressedTarName)
|
||||
+ if err := util.PackFiles(s.tmpDir.base, tarPath, archive.Gzip, true); err != nil {
|
||||
+ return "", err
|
||||
+ }
|
||||
+
|
||||
+ return tarPath, nil
|
||||
+}
|
||||
+
|
||||
+func (s *separatorLoad) unpackTarballs() error {
|
||||
+ if err := s.makeTempDir(); err != nil {
|
||||
+ return errors.Wrap(err, "failed to make temporary directories")
|
||||
+ }
|
||||
+
|
||||
+ // unpack base first and the later images will be moved here
|
||||
+ if err := util.UnpackFile(s.basePath, s.tmpDir.base, archive.Gzip, false); err != nil {
|
||||
+ return errors.Wrapf(err, "unpack base tarball %q failed", s.basePath)
|
||||
+ }
|
||||
+
|
||||
+ if err := util.UnpackFile(s.appPath, s.tmpDir.app, archive.Gzip, false); err != nil {
|
||||
+ return errors.Wrapf(err, "unpack app tarball %q failed", s.appPath)
|
||||
+ }
|
||||
+
|
||||
+ if len(s.libPath) != 0 {
|
||||
+ if err := util.UnpackFile(s.libPath, s.tmpDir.lib, archive.Gzip, false); err != nil {
|
||||
+ return errors.Wrapf(err, "unpack lib tarball %q failed", s.libPath)
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func (s *separatorLoad) reconstructImage() error {
|
||||
+ files, err := ioutil.ReadDir(s.tmpDir.app)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+
|
||||
+ for _, f := range files {
|
||||
+ src := filepath.Join(s.tmpDir.app, f.Name())
|
||||
+ dest := filepath.Join(s.tmpDir.base, f.Name())
|
||||
+ if err := os.Rename(src, dest); err != nil {
|
||||
+ return errors.Wrapf(err, "reconstruct app file %q failed", s.info.AppTarName)
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ if len(s.libPath) != 0 {
|
||||
+ files, err := ioutil.ReadDir(s.tmpDir.lib)
|
||||
+ if err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+
|
||||
+ for _, f := range files {
|
||||
+ src := filepath.Join(s.tmpDir.lib, f.Name())
|
||||
+ dest := filepath.Join(s.tmpDir.base, f.Name())
|
||||
+ if err := os.Rename(src, dest); err != nil {
|
||||
+ return errors.Wrapf(err, "reconstruct lib file %q failed", s.info.LibTarName)
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
+
|
||||
+func (s *separatorLoad) makeTempDir() error {
|
||||
+ dirs := []string{s.tmpDir.root, s.tmpDir.app, s.tmpDir.base, s.tmpDir.lib}
|
||||
+ for _, dir := range dirs {
|
||||
+ if err := os.MkdirAll(dir, constant.DefaultRootDirMode); err != nil {
|
||||
+ return err
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ return nil
|
||||
+}
|
||||
--
|
||||
2.27.0
|
||||
|
||||
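For orientation, the separated-image load flow added above (read the manifest, verify checksums, unpack base/app/lib, fold the app and lib layers back onto the base, then repack) reduces to a directory merge followed by a repack. The standalone sketch below illustrates only the merge step using the standard library; the directory layout and the mergeInto helper are illustrative assumptions, while the actual patch relies on util.UnpackFile, os.Rename and util.PackFiles as shown.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// mergeInto moves every entry of srcDir into destDir, mirroring the
// reconstructImage step above: the unpacked app (and optional lib)
// directories are folded back onto the unpacked base image directory.
func mergeInto(srcDir, destDir string) error {
	entries, err := os.ReadDir(srcDir)
	if err != nil {
		return err
	}
	for _, e := range entries {
		src := filepath.Join(srcDir, e.Name())
		dest := filepath.Join(destDir, e.Name())
		if err := os.Rename(src, dest); err != nil {
			return fmt.Errorf("merge %q into %q: %w", src, destDir, err)
		}
	}
	return nil
}

func main() {
	// Hypothetical layout produced by the unpack step: base/, app/, lib/.
	base := "/tmp/sep/base"
	for _, dir := range []string{"/tmp/sep/app", "/tmp/sep/lib"} {
		if err := mergeInto(dir, base); err != nil {
			fmt.Println("merge failed:", err)
			return
		}
	}
	// The merged base directory would then be repacked into a single
	// gzip tarball and loaded through the normal load path.
	fmt.Println("merged layers under", base)
}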
@@ -0,0 +1,440 @@
|
||||
From 6e321766a0b4ace2211c9d39cfce58bf4627e63f Mon Sep 17 00:00:00 2001
|
||||
From: DCCooper <1866858@gmail.com>
|
||||
Date: Wed, 27 Oct 2021 21:32:12 +0800
|
||||
Subject: [PATCH 04/16] test: optimize save client options and add unit test
|
||||
|
||||
Signed-off-by: DCCooper <1866858@gmail.com>
|
||||
---
|
||||
cmd/cli/save.go | 84 ++++++++--------
|
||||
cmd/cli/save_test.go | 232 ++++++++++++++++++++++++++++++++++++++++++-
|
||||
2 files changed, 270 insertions(+), 46 deletions(-)
|
||||
|
||||
diff --git a/cmd/cli/save.go b/cmd/cli/save.go
|
||||
index 4d22798a..599d394d 100644
|
||||
--- a/cmd/cli/save.go
|
||||
+++ b/cmd/cli/save.go
|
||||
@@ -18,7 +18,6 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
- "path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
@@ -51,21 +50,21 @@ const (
|
||||
saveExample = `isula-build ctr-img save busybox:latest -o busybox.tar
|
||||
isula-build ctr-img save 21c3e96ac411 -o myimage.tar
|
||||
isula-build ctr-img save busybox:latest alpine:3.9 -o all.tar
|
||||
-isula-build ctr-img save app:latest app1:latest -d Images
|
||||
+isula-build ctr-img save app:latest -b busybox:latest -d Images
|
||||
isula-build ctr-img save app:latest app1:latest -d Images -b busybox:latest -l lib:latest -r rename.json`
|
||||
)
|
||||
|
||||
// NewSaveCmd cmd for container image saving
|
||||
func NewSaveCmd() *cobra.Command {
|
||||
saveCmd := &cobra.Command{
|
||||
- Use: "save IMAGE [IMAGE...] [FLAGS]",
|
||||
+ Use: "save IMAGE [IMAGE...] FLAGS",
|
||||
Short: "Save image to tarball",
|
||||
Example: saveExample,
|
||||
RunE: saveCommand,
|
||||
}
|
||||
|
||||
saveCmd.PersistentFlags().StringVarP(&saveOpts.path, "output", "o", "", "Path to save the tarball")
|
||||
- saveCmd.PersistentFlags().StringVarP(&saveOpts.sep.destPath, "dest", "d", "Images", "Destination file directory to store separated images")
|
||||
+ saveCmd.PersistentFlags().StringVarP(&saveOpts.sep.destPath, "dest", "d", "", "Destination file directory to store separated images")
|
||||
saveCmd.PersistentFlags().StringVarP(&saveOpts.sep.baseImgName, "base", "b", "", "Base image name of separated images")
|
||||
saveCmd.PersistentFlags().StringVarP(&saveOpts.sep.libImageName, "lib", "l", "", "Lib image name of separated images")
|
||||
saveCmd.PersistentFlags().StringVarP(&saveOpts.sep.renameFile, "rename", "r", "", "Rename json file path of separated images")
|
||||
@@ -95,12 +94,16 @@ func saveCommand(cmd *cobra.Command, args []string) error {
|
||||
}
|
||||
|
||||
func (sep *separatorSaveOption) check(pwd string) error {
|
||||
- if len(sep.baseImgName) != 0 {
|
||||
- if !util.IsValidImageName(sep.baseImgName) {
|
||||
- return errors.Errorf("invalid base image name %s", sep.baseImgName)
|
||||
- }
|
||||
+ if len(sep.baseImgName) == 0 {
|
||||
+ return errors.New("base image name(-b) must be provided")
|
||||
+ }
|
||||
+ if !util.IsValidImageName(sep.baseImgName) {
|
||||
+ return errors.Errorf("invalid base image name %s", sep.baseImgName)
|
||||
}
|
||||
if len(sep.libImageName) != 0 {
|
||||
+ if sep.libImageName == sep.baseImgName {
|
||||
+ return errors.New("base and lib images are the same")
|
||||
+ }
|
||||
if !util.IsValidImageName(sep.libImageName) {
|
||||
return errors.Errorf("invalid lib image name %s", sep.libImageName)
|
||||
}
|
||||
@@ -108,16 +111,12 @@ func (sep *separatorSaveOption) check(pwd string) error {
|
||||
if len(sep.destPath) == 0 {
|
||||
sep.destPath = "Images"
|
||||
}
|
||||
- if !filepath.IsAbs(sep.destPath) {
|
||||
- sep.destPath = util.MakeAbsolute(sep.destPath, pwd)
|
||||
- }
|
||||
+ sep.destPath = util.MakeAbsolute(sep.destPath, pwd)
|
||||
if util.IsExist(sep.destPath) {
|
||||
- return errors.Errorf("output file already exist: %q, try to remove existing tarball or rename output file", sep.destPath)
|
||||
+ return errors.Errorf("dest path already exist: %q, try to remove or rename it", sep.destPath)
|
||||
}
|
||||
if len(sep.renameFile) != 0 {
|
||||
- if !filepath.IsAbs(sep.renameFile) {
|
||||
- sep.renameFile = util.MakeAbsolute(sep.renameFile, pwd)
|
||||
- }
|
||||
+ sep.renameFile = util.MakeAbsolute(sep.renameFile, pwd)
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -136,39 +135,36 @@ func (opt *saveOptions) checkSaveOpts(args []string) error {
|
||||
return errors.New("get current path failed")
|
||||
}
|
||||
|
||||
- // normal save
|
||||
- if !opt.sep.isEnabled() {
|
||||
- // only check oci format when doing normal save operation
|
||||
- if opt.format == constant.OCITransport && len(args) >= 2 {
|
||||
- return errors.New("oci image format now only supports saving single image")
|
||||
+ // separator save
|
||||
+ if opt.sep.isEnabled() {
|
||||
+ if len(opt.path) != 0 {
|
||||
+ return errors.New("conflict flags between -o and [-b -l -r -d]")
|
||||
}
|
||||
- if err := util.CheckImageFormat(opt.format); err != nil {
|
||||
+		// separate image only supports docker image spec
|
||||
+ opt.format = constant.DockerTransport
|
||||
+ if err := opt.sep.check(pwd); err != nil {
|
||||
return err
|
||||
}
|
||||
- if len(opt.path) == 0 {
|
||||
- return errors.New("output path should not be empty")
|
||||
- }
|
||||
- if !filepath.IsAbs(opt.path) {
|
||||
- opt.path = util.MakeAbsolute(opt.path, pwd)
|
||||
- }
|
||||
- if util.IsExist(opt.path) {
|
||||
- return errors.Errorf("output file already exist: %q, try to remove existing tarball or rename output file", opt.path)
|
||||
- }
|
||||
+ opt.sep.enabled = true
|
||||
+
|
||||
return nil
|
||||
}
|
||||
|
||||
- // separator save
|
||||
- opt.sep.enabled = true
|
||||
- if len(opt.path) != 0 {
|
||||
- return errors.New("conflict options between -o and [-b -l -r]")
|
||||
+ // normal save
|
||||
+ // only check oci format when doing normal save operation
|
||||
+ if len(opt.path) == 0 {
|
||||
+ return errors.New("output path(-o) should not be empty")
|
||||
}
|
||||
- // separate image only support docker image spec
|
||||
- opt.format = constant.DockerTransport
|
||||
-
|
||||
- if err := opt.sep.check(pwd); err != nil {
|
||||
+ if opt.format == constant.OCITransport && len(args) >= 2 {
|
||||
+ return errors.New("oci image format now only supports saving single image")
|
||||
+ }
|
||||
+ if err := util.CheckImageFormat(opt.format); err != nil {
|
||||
return err
|
||||
}
|
||||
-
|
||||
+ opt.path = util.MakeAbsolute(opt.path, pwd)
|
||||
+ if util.IsExist(opt.path) {
|
||||
+ return errors.Errorf("output file already exist: %q, try to remove existing tarball or rename output file", opt.path)
|
||||
+ }
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -177,10 +173,10 @@ func runSave(ctx context.Context, cli Cli, args []string) error {
|
||||
saveOpts.images = args
|
||||
|
||||
sep := &pb.SeparatorSave{
|
||||
- Base: saveOpts.sep.baseImgName,
|
||||
- Lib: saveOpts.sep.libImageName,
|
||||
- Rename: saveOpts.sep.renameFile,
|
||||
- Dest: saveOpts.sep.destPath,
|
||||
+ Base: saveOpts.sep.baseImgName,
|
||||
+ Lib: saveOpts.sep.libImageName,
|
||||
+ Rename: saveOpts.sep.renameFile,
|
||||
+ Dest: saveOpts.sep.destPath,
|
||||
Enabled: saveOpts.sep.enabled,
|
||||
}
|
||||
|
||||
@@ -212,5 +208,5 @@ func runSave(ctx context.Context, cli Cli, args []string) error {
|
||||
}
|
||||
|
||||
func (sep *separatorSaveOption) isEnabled() bool {
|
||||
- return util.AnyFlagSet(sep.baseImgName, sep.libImageName, sep.renameFile)
|
||||
+ return util.AnyFlagSet(sep.baseImgName, sep.libImageName, sep.renameFile, sep.destPath)
|
||||
}
|
||||
diff --git a/cmd/cli/save_test.go b/cmd/cli/save_test.go
|
||||
index 3fe6bf81..72f6ded3 100644
|
||||
--- a/cmd/cli/save_test.go
|
||||
+++ b/cmd/cli/save_test.go
|
||||
@@ -16,10 +16,13 @@ package main
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
+ "os"
|
||||
+ "path/filepath"
|
||||
"testing"
|
||||
|
||||
"gotest.tools/v3/assert"
|
||||
"gotest.tools/v3/fs"
|
||||
+ constant "isula.org/isula-build"
|
||||
)
|
||||
|
||||
func TestSaveCommand(t *testing.T) {
|
||||
@@ -38,7 +41,7 @@ func TestSaveCommand(t *testing.T) {
|
||||
wantErr bool
|
||||
}
|
||||
|
||||
- // For normal cases, default err is "invalid socket path: unix:///var/run/isula_build.sock".
|
||||
+ // For normal cases, default err is "invalid socket path: unix:///var/run/isula_build.sock".
|
||||
// As daemon is not running as we run unit test.
|
||||
var testcases = []testcase{
|
||||
{
|
||||
@@ -86,7 +89,7 @@ func TestSaveCommand(t *testing.T) {
|
||||
path: "",
|
||||
args: []string{"testImage"},
|
||||
wantErr: true,
|
||||
- errString: "output path should not be empty",
|
||||
+ errString: "output path(-o) should not be empty",
|
||||
format: "docker",
|
||||
},
|
||||
{
|
||||
@@ -194,3 +197,228 @@ func TestRunSave(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
+
|
||||
+func TestCheckSaveOpts(t *testing.T) {
|
||||
+ pwd, err := os.Getwd()
|
||||
+ assert.NilError(t, err)
|
||||
+ existDirPath := filepath.Join(pwd, "DirAlreadyExist")
|
||||
+ existFilePath := filepath.Join(pwd, "FileAlreadExist")
|
||||
+ err = os.Mkdir(existDirPath, constant.DefaultRootDirMode)
|
||||
+ assert.NilError(t, err)
|
||||
+ _, err = os.Create(existFilePath)
|
||||
+ assert.NilError(t, err)
|
||||
+ defer os.Remove(existDirPath)
|
||||
+ defer os.Remove(existFilePath)
|
||||
+
|
||||
+ type fields struct {
|
||||
+ images []string
|
||||
+ sep separatorSaveOption
|
||||
+ path string
|
||||
+ saveID string
|
||||
+ format string
|
||||
+ }
|
||||
+ type args struct {
|
||||
+ args []string
|
||||
+ }
|
||||
+ tests := []struct {
|
||||
+ name string
|
||||
+ fields fields
|
||||
+ args args
|
||||
+ wantErr bool
|
||||
+ }{
|
||||
+ {
|
||||
+ name: "TC-normal save",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ path: "test.tar",
|
||||
+ format: constant.DockerTransport,
|
||||
+ },
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-normal save with empty args",
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-normal save with path has colon in it",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ path: "invalid:path.tar",
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-normal save without path",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-normal save with oci format",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ path: "test.tar",
|
||||
+ format: constant.OCITransport,
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-normal save with invalid format",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ path: "test.tar",
|
||||
+ format: "invalidFormat",
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-normal save with path already exist",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ path: existFilePath,
|
||||
+ format: constant.DockerTransport,
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated save",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ format: constant.DockerTransport,
|
||||
+ sep: separatorSaveOption{
|
||||
+ baseImgName: "base",
|
||||
+ libImageName: "lib",
|
||||
+ renameFile: "rename.json",
|
||||
+ destPath: "Images",
|
||||
+ },
|
||||
+ },
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated save with -o flag",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ path: "test.tar",
|
||||
+ format: constant.DockerTransport,
|
||||
+ sep: separatorSaveOption{
|
||||
+ baseImgName: "base",
|
||||
+ libImageName: "lib",
|
||||
+ renameFile: "rename.json",
|
||||
+ destPath: "Images",
|
||||
+ },
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated save without -b flag",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ format: constant.DockerTransport,
|
||||
+ sep: separatorSaveOption{
|
||||
+ libImageName: "lib",
|
||||
+ renameFile: "rename.json",
|
||||
+ destPath: "Images",
|
||||
+ },
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated save invalid base image name",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ format: constant.DockerTransport,
|
||||
+ sep: separatorSaveOption{
|
||||
+ baseImgName: "in:valid:base:name",
|
||||
+ libImageName: "lib",
|
||||
+ renameFile: "rename.json",
|
||||
+ destPath: "Images",
|
||||
+ },
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated save invalid lib image name",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ format: constant.DockerTransport,
|
||||
+ sep: separatorSaveOption{
|
||||
+ baseImgName: "base",
|
||||
+ libImageName: "in:valid:lib:name",
|
||||
+ renameFile: "rename.json",
|
||||
+ destPath: "Images",
|
||||
+ },
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated save without dest option",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ format: constant.DockerTransport,
|
||||
+ sep: separatorSaveOption{
|
||||
+ baseImgName: "base",
|
||||
+ libImageName: "lib",
|
||||
+ renameFile: "rename.json",
|
||||
+ },
|
||||
+ },
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated save with dest already exist",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ format: constant.DockerTransport,
|
||||
+ sep: separatorSaveOption{
|
||||
+ baseImgName: "base",
|
||||
+ libImageName: "lib",
|
||||
+ renameFile: "rename.json",
|
||||
+ destPath: existDirPath,
|
||||
+ },
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated save with same base and lib image",
|
||||
+ args: args{[]string{"app:latest", "app1:latest"}},
|
||||
+ fields: fields{
|
||||
+ images: []string{"app:latest", "app1:latest"},
|
||||
+ format: constant.DockerTransport,
|
||||
+ sep: separatorSaveOption{
|
||||
+ baseImgName: "same:image",
|
||||
+ libImageName: "same:image",
|
||||
+ renameFile: "rename.json",
|
||||
+ destPath: existDirPath,
|
||||
+ },
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ }
|
||||
+ for _, tt := range tests {
|
||||
+ t.Run(tt.name, func(t *testing.T) {
|
||||
+ opt := &saveOptions{
|
||||
+ images: tt.fields.images,
|
||||
+ sep: tt.fields.sep,
|
||||
+ path: tt.fields.path,
|
||||
+ saveID: tt.fields.saveID,
|
||||
+ format: tt.fields.format,
|
||||
+ }
|
||||
+ if err := opt.checkSaveOpts(tt.args.args); (err != nil) != tt.wantErr {
|
||||
+ t.Errorf("saveOptions.checkSaveOpts() error = %v, wantErr %v", err, tt.wantErr)
|
||||
+ }
|
||||
+ })
|
||||
+ }
|
||||
+}
|
||||
--
|
||||
2.27.0
|
||||
|
||||
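The save-side validation above splits into two modes: any separator flag (-b, -l, -r, -d) selects separated save, which conflicts with -o and is forced to the docker transport, while a plain save requires -o. A minimal sketch of that decision follows; the struct and field names are illustrative and are not the patch's actual types.

package main

import (
	"errors"
	"fmt"
)

// saveMode mirrors the branching in checkSaveOpts and separatorSaveOption.check above.
type saveMode struct {
	output string // -o
	base   string // -b
	lib    string // -l
	rename string // -r
	dest   string // -d
}

func (m saveMode) validate() (separated bool, err error) {
	separated = m.base != "" || m.lib != "" || m.rename != "" || m.dest != ""
	if separated {
		if m.output != "" {
			return true, errors.New("conflict flags between -o and [-b -l -r -d]")
		}
		if m.base == "" {
			return true, errors.New("base image name(-b) must be provided")
		}
		return true, nil
	}
	if m.output == "" {
		return false, errors.New("output path(-o) should not be empty")
	}
	return false, nil
}

func main() {
	cases := []saveMode{
		{output: "busybox.tar"},                     // normal save
		{base: "busybox:latest", dest: "Images"},    // separated save
		{output: "all.tar", base: "busybox:latest"}, // conflicting flags
	}
	for _, m := range cases {
		sep, err := m.validate()
		fmt.Printf("separated=%v err=%v\n", sep, err)
	}
}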
@@ -0,0 +1,409 @@
|
||||
From 2f8f5aa8c8444e9d9c39eba2c060e4e9fa4089bc Mon Sep 17 00:00:00 2001
|
||||
From: DCCooper <1866858@gmail.com>
|
||||
Date: Thu, 28 Oct 2021 15:03:04 +0800
|
||||
Subject: [PATCH 06/16] test: optimize load client options and add unit test
|
||||
|
||||
Signed-off-by: DCCooper <1866858@gmail.com>
|
||||
---
|
||||
cmd/cli/load.go | 77 ++++++++--------
|
||||
cmd/cli/load_test.go | 209 +++++++++++++++++++++++++++++++++++++++++++
|
||||
cmd/cli/mock.go | 7 +-
|
||||
3 files changed, 252 insertions(+), 41 deletions(-)
|
||||
|
||||
diff --git a/cmd/cli/load.go b/cmd/cli/load.go
|
||||
index 2a9df772..cf142592 100644
|
||||
--- a/cmd/cli/load.go
|
||||
+++ b/cmd/cli/load.go
|
||||
@@ -20,7 +20,6 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
- "path/filepath"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/cobra"
|
||||
@@ -56,7 +55,7 @@ isula-build ctr-img load -i app:latest -d /home/Images -b /home/Images/base.tar.
|
||||
// NewLoadCmd returns image load command
|
||||
func NewLoadCmd() *cobra.Command {
|
||||
loadCmd := &cobra.Command{
|
||||
- Use: "load [FLAGS]",
|
||||
+ Use: "load FLAGS",
|
||||
Short: "Load images",
|
||||
Example: loadExample,
|
||||
Args: util.NoArgs,
|
||||
@@ -122,20 +121,13 @@ func runLoad(ctx context.Context, cli Cli) error {
|
||||
return err
|
||||
}
|
||||
|
||||
-func resolveLoadPath(path string) (string, error) {
|
||||
+func resolveLoadPath(path, pwd string) (string, error) {
|
||||
// check input
|
||||
if path == "" {
|
||||
return "", errors.New("tarball path should not be empty")
|
||||
}
|
||||
|
||||
- if !filepath.IsAbs(path) {
|
||||
- pwd, err := os.Getwd()
|
||||
- if err != nil {
|
||||
- return "", errors.Wrap(err, "get current path failed while loading image")
|
||||
- }
|
||||
- path = util.MakeAbsolute(path, pwd)
|
||||
- }
|
||||
-
|
||||
+ path = util.MakeAbsolute(path, pwd)
|
||||
if err := util.CheckLoadFile(path); err != nil {
|
||||
return "", err
|
||||
}
|
||||
@@ -144,30 +136,35 @@ func resolveLoadPath(path string) (string, error) {
|
||||
}
|
||||
|
||||
func (opt *loadOptions) checkLoadOpts() error {
|
||||
- // normal load
|
||||
- if !opt.sep.isEnabled() {
|
||||
- path, err := resolveLoadPath(opt.path)
|
||||
- if err != nil {
|
||||
- return err
|
||||
- }
|
||||
- opt.path = path
|
||||
-
|
||||
- return nil
|
||||
+ pwd, err := os.Getwd()
|
||||
+ if err != nil {
|
||||
+ return errors.New("get current path failed")
|
||||
}
|
||||
|
||||
// load separated image
|
||||
- opt.sep.enabled = true
|
||||
- if len(opt.path) == 0 {
|
||||
- return errors.New("app image should not be empty")
|
||||
- }
|
||||
+ if opt.sep.isEnabled() {
|
||||
+		// Use opt.path as the app image name when operating on separated images;
+		// this can be marked as a switch for handling separated images
|
||||
+ opt.sep.app = opt.path
|
||||
+
|
||||
+ if len(opt.sep.app) == 0 {
|
||||
+ return errors.New("app image name(-i) should not be empty")
|
||||
+ }
|
||||
+
|
||||
+ if cErr := opt.sep.check(pwd); cErr != nil {
|
||||
+ return cErr
|
||||
+ }
|
||||
+ opt.sep.enabled = true
|
||||
|
||||
- // Use opt.path as app image name when operating separated images
|
||||
- // this can be mark as a switch for handling separated images
|
||||
- opt.sep.app = opt.path
|
||||
+ return nil
|
||||
+ }
|
||||
|
||||
- if err := opt.sep.check(); err != nil {
|
||||
+ // normal load
|
||||
+ path, err := resolveLoadPath(opt.path, pwd)
|
||||
+ if err != nil {
|
||||
return err
|
||||
}
|
||||
+ opt.path = path
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -176,35 +173,35 @@ func (sep *separatorLoadOption) isEnabled() bool {
|
||||
return util.AnyFlagSet(sep.dir, sep.base, sep.lib, sep.app)
|
||||
}
|
||||
|
||||
-func (sep *separatorLoadOption) check() error {
|
||||
- pwd, err := os.Getwd()
|
||||
- if err != nil {
|
||||
- return errors.New("get current path failed")
|
||||
+func (sep *separatorLoadOption) check(pwd string) error {
|
||||
+ if len(sep.dir) == 0 {
|
||||
+ return errors.New("image tarball directory should not be empty")
|
||||
}
|
||||
+
|
||||
+ if sep.base == sep.lib {
|
||||
+ return errors.New("base and lib tarballs are the same")
|
||||
+ }
|
||||
+
|
||||
if !util.IsValidImageName(sep.app) {
|
||||
return errors.Errorf("invalid image name: %s", sep.app)
|
||||
}
|
||||
|
||||
if len(sep.base) != 0 {
|
||||
- path, err := resolveLoadPath(sep.base)
|
||||
+ path, err := resolveLoadPath(sep.base, pwd)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "resolve base tarball path failed")
|
||||
}
|
||||
sep.base = path
|
||||
}
|
||||
if len(sep.lib) != 0 {
|
||||
- path, err := resolveLoadPath(sep.lib)
|
||||
+ path, err := resolveLoadPath(sep.lib, pwd)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "resolve lib tarball path failed")
|
||||
}
|
||||
sep.lib = path
|
||||
}
|
||||
- if len(sep.dir) == 0 {
|
||||
- return errors.New("image tarball directory should not be empty")
|
||||
- }
|
||||
- if !filepath.IsAbs(sep.dir) {
|
||||
- sep.dir = util.MakeAbsolute(sep.dir, pwd)
|
||||
- }
|
||||
+
|
||||
+ sep.dir = util.MakeAbsolute(sep.dir, pwd)
|
||||
if !util.IsExist(sep.dir) {
|
||||
return errors.Errorf("image tarball directory %s is not exist", sep.dir)
|
||||
}
|
||||
diff --git a/cmd/cli/load_test.go b/cmd/cli/load_test.go
|
||||
index b7bf2a57..0bad4cbd 100644
|
||||
--- a/cmd/cli/load_test.go
|
||||
+++ b/cmd/cli/load_test.go
|
||||
@@ -16,6 +16,7 @@ package main
|
||||
import (
|
||||
"context"
|
||||
"io/ioutil"
|
||||
+ "os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
@@ -121,3 +122,211 @@ func TestRunLoad(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
+
|
||||
+func TestResolveLoadPath(t *testing.T) {
|
||||
+ dir := fs.NewDir(t, t.Name())
|
||||
+ fileWithContent := fs.NewFile(t, filepath.Join(t.Name(), "test.tar"))
|
||||
+ ioutil.WriteFile(fileWithContent.Path(), []byte("This is test file"), constant.DefaultRootFileMode)
|
||||
+ emptyFile := fs.NewFile(t, filepath.Join(t.Name(), "empty.tar"))
|
||||
+
|
||||
+ defer dir.Remove()
|
||||
+ defer fileWithContent.Remove()
|
||||
+ defer emptyFile.Remove()
|
||||
+
|
||||
+ type args struct {
|
||||
+ path string
|
||||
+ pwd string
|
||||
+ }
|
||||
+ tests := []struct {
|
||||
+ name string
|
||||
+ args args
|
||||
+ want string
|
||||
+ wantErr bool
|
||||
+ }{
|
||||
+ {
|
||||
+ name: "TC-normal load path",
|
||||
+ args: args{
|
||||
+ path: fileWithContent.Path(),
|
||||
+ pwd: dir.Path(),
|
||||
+ },
|
||||
+ want: fileWithContent.Path(),
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-empty load path",
|
||||
+ args: args{
|
||||
+ pwd: dir.Path(),
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-empty load file",
|
||||
+ args: args{
|
||||
+ path: emptyFile.Path(),
|
||||
+ pwd: dir.Path(),
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ }
|
||||
+ for _, tt := range tests {
|
||||
+ t.Run(tt.name, func(t *testing.T) {
|
||||
+ got, err := resolveLoadPath(tt.args.path, tt.args.pwd)
|
||||
+ if (err != nil) != tt.wantErr {
|
||||
+ t.Errorf("resolveLoadPath() error = %v, wantErr %v", err, tt.wantErr)
|
||||
+ return
|
||||
+ }
|
||||
+ if got != tt.want {
|
||||
+ t.Errorf("resolveLoadPath() = %v, want %v", got, tt.want)
|
||||
+ }
|
||||
+ })
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+func TestCheckLoadOpts(t *testing.T) {
|
||||
+ root := fs.NewDir(t, t.Name())
|
||||
+ defer root.Remove()
|
||||
+ emptyFile, err := os.Create(filepath.Join(root.Path(), "empty.tar"))
|
||||
+ assert.NilError(t, err)
|
||||
+ fileWithContent, err := os.Create(filepath.Join(root.Path(), "test.tar"))
|
||||
+ assert.NilError(t, err)
|
||||
+ ioutil.WriteFile(fileWithContent.Name(), []byte("This is test file"), constant.DefaultRootFileMode)
|
||||
+ baseFile, err := os.Create(filepath.Join(root.Path(), "base.tar"))
|
||||
+ assert.NilError(t, err)
|
||||
+ ioutil.WriteFile(baseFile.Name(), []byte("This is base file"), constant.DefaultRootFileMode)
|
||||
+ libFile, err := os.Create(filepath.Join(root.Path(), "lib.tar"))
|
||||
+ ioutil.WriteFile(libFile.Name(), []byte("This is lib file"), constant.DefaultRootFileMode)
|
||||
+
|
||||
+ type fields struct {
|
||||
+ path string
|
||||
+ loadID string
|
||||
+ sep separatorLoadOption
|
||||
+ }
|
||||
+ tests := []struct {
|
||||
+ name string
|
||||
+ fields fields
|
||||
+ wantErr bool
|
||||
+ }{
|
||||
+ {
|
||||
+ name: "TC-normal load options",
|
||||
+ fields: fields{
|
||||
+ path: fileWithContent.Name(),
|
||||
+ },
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-empty load path",
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-empty load file",
|
||||
+ fields: fields{
|
||||
+ path: emptyFile.Name(),
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated load",
|
||||
+ fields: fields{
|
||||
+ path: "app:latest",
|
||||
+ sep: separatorLoadOption{
|
||||
+ dir: root.Path(),
|
||||
+ app: "app:latest",
|
||||
+ base: baseFile.Name(),
|
||||
+ lib: libFile.Name(),
|
||||
+ },
|
||||
+ },
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated load with empty app name",
|
||||
+ fields: fields{
|
||||
+ sep: separatorLoadOption{
|
||||
+ dir: root.Path(),
|
||||
+ base: baseFile.Name(),
|
||||
+ lib: libFile.Name(),
|
||||
+ },
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated load with empty dir",
|
||||
+ fields: fields{
|
||||
+ path: "app:latest",
|
||||
+ sep: separatorLoadOption{
|
||||
+ base: baseFile.Name(),
|
||||
+ lib: libFile.Name(),
|
||||
+ },
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated load with invalid app name",
|
||||
+ fields: fields{
|
||||
+ path: "invalid:app:name",
|
||||
+ sep: separatorLoadOption{
|
||||
+ dir: root.Path(),
|
||||
+ base: baseFile.Name(),
|
||||
+ lib: libFile.Name(),
|
||||
+ },
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated load with empty base tarball",
|
||||
+ fields: fields{
|
||||
+ path: "app:latest",
|
||||
+ sep: separatorLoadOption{
|
||||
+ dir: root.Path(),
|
||||
+ base: emptyFile.Name(),
|
||||
+ lib: libFile.Name(),
|
||||
+ },
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated load with empty lib tarball",
|
||||
+ fields: fields{
|
||||
+ path: "app:latest",
|
||||
+ sep: separatorLoadOption{
|
||||
+ dir: root.Path(),
|
||||
+ base: baseFile.Name(),
|
||||
+ lib: emptyFile.Name(),
|
||||
+ },
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated load with same base and lib tarball",
|
||||
+ fields: fields{
|
||||
+ path: "app:latest",
|
||||
+ sep: separatorLoadOption{
|
||||
+ dir: root.Path(),
|
||||
+ base: fileWithContent.Name(),
|
||||
+ lib: fileWithContent.Name(),
|
||||
+ },
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-separated load with dir not exist",
|
||||
+ fields: fields{
|
||||
+ path: "app:latest",
|
||||
+ sep: separatorLoadOption{
|
||||
+ dir: "path not exist",
|
||||
+ base: baseFile.Name(),
|
||||
+ lib: libFile.Name(),
|
||||
+ },
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ }
|
||||
+ for _, tt := range tests {
|
||||
+ t.Run(tt.name, func(t *testing.T) {
|
||||
+ opt := &loadOptions{
|
||||
+ path: tt.fields.path,
|
||||
+ loadID: tt.fields.loadID,
|
||||
+ sep: tt.fields.sep,
|
||||
+ }
|
||||
+ if err := opt.checkLoadOpts(); (err != nil) != tt.wantErr {
|
||||
+ t.Errorf("loadOptions.checkLoadOpts() error = %v, wantErr %v", err, tt.wantErr)
|
||||
+ }
|
||||
+ })
|
||||
+ }
|
||||
+}
|
||||
diff --git a/cmd/cli/mock.go b/cmd/cli/mock.go
|
||||
index 142c87fa..23a8a031 100644
|
||||
--- a/cmd/cli/mock.go
|
||||
+++ b/cmd/cli/mock.go
|
||||
@@ -16,6 +16,7 @@ package main
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
+ "os"
|
||||
"testing"
|
||||
|
||||
"github.com/gogo/protobuf/types"
|
||||
@@ -324,7 +325,11 @@ func (f *mockDaemon) load(_ context.Context, in *pb.LoadRequest, opts ...grpc.Ca
|
||||
if path == "" {
|
||||
return &mockLoadClient{}, errors.Errorf("tarball path should not be empty")
|
||||
}
|
||||
- _, err := resolveLoadPath(path)
|
||||
+ pwd, err := os.Getwd()
|
||||
+ if err != nil {
|
||||
+ return &mockLoadClient{}, err
|
||||
+ }
|
||||
+ _, err = resolveLoadPath(path, pwd)
|
||||
return &mockLoadClient{}, err
|
||||
}
|
||||
|
||||
--
|
||||
2.27.0
|
||||
|
||||
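A refactor shared by the two CLI patches above is dropping the scattered filepath.IsAbs checks and ad-hoc os.Getwd calls in favour of resolving the working directory once and passing it into the helpers, which is what makes the new table-driven tests possible. The snippet below is a plausible reading of the behaviour those call sites assume from util.MakeAbsolute; its real implementation is not shown in this change set.

package main

import (
	"fmt"
	"path/filepath"
)

// makeAbsolute returns path unchanged if it is already absolute, otherwise
// joins it onto the caller-supplied working directory. Injecting pwd lets
// unit tests substitute a temporary directory for the process cwd.
func makeAbsolute(path, pwd string) string {
	if filepath.IsAbs(path) {
		return path
	}
	return filepath.Join(pwd, path)
}

func main() {
	fmt.Println(makeAbsolute("base.tar.gz", "/home/Images")) // /home/Images/base.tar.gz
	fmt.Println(makeAbsolute("/tmp/app.tar", "/home/Images")) // /tmp/app.tar
}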
@@ -0,0 +1,203 @@
|
||||
From c5fe173afd31636bf014dac31f6e601d91e1ae53 Mon Sep 17 00:00:00 2001
|
||||
From: DCCooper <1866858@gmail.com>
|
||||
Date: Sat, 30 Oct 2021 10:12:40 +0800
|
||||
Subject: [PATCH 12/16] bugfix: fix when load separated image error return
|
||||
|
||||
reason: if base and lib dir are both not provided, the daemon
side will read the info from the "manifest" file in the dest dir
automatically, so no error is returned here
|
||||
|
||||
Signed-off-by: DCCooper <1866858@gmail.com>
|
||||
---
|
||||
cmd/cli/load.go | 4 +--
|
||||
cmd/cli/load_test.go | 59 +++++++++++++++++++++++++++++++++-----------
|
||||
2 files changed, 47 insertions(+), 16 deletions(-)
|
||||
|
||||
diff --git a/cmd/cli/load.go b/cmd/cli/load.go
|
||||
index cf142592..44fefdd2 100644
|
||||
--- a/cmd/cli/load.go
|
||||
+++ b/cmd/cli/load.go
|
||||
@@ -178,7 +178,7 @@ func (sep *separatorLoadOption) check(pwd string) error {
|
||||
return errors.New("image tarball directory should not be empty")
|
||||
}
|
||||
|
||||
- if sep.base == sep.lib {
|
||||
+ if len(sep.base) != 0 && sep.base == sep.lib {
|
||||
return errors.New("base and lib tarballs are the same")
|
||||
}
|
||||
|
||||
@@ -203,7 +203,7 @@ func (sep *separatorLoadOption) check(pwd string) error {
|
||||
|
||||
sep.dir = util.MakeAbsolute(sep.dir, pwd)
|
||||
if !util.IsExist(sep.dir) {
|
||||
- return errors.Errorf("image tarball directory %s is not exist", sep.dir)
|
||||
+ return errors.Errorf("image tarball directory %q is not exist", sep.dir)
|
||||
}
|
||||
|
||||
return nil
|
||||
diff --git a/cmd/cli/load_test.go b/cmd/cli/load_test.go
|
||||
index 0bad4cbd..cb8217ce 100644
|
||||
--- a/cmd/cli/load_test.go
|
||||
+++ b/cmd/cli/load_test.go
|
||||
@@ -15,6 +15,7 @@ package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
+ "fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
@@ -22,7 +23,9 @@ import (
|
||||
|
||||
"gotest.tools/v3/assert"
|
||||
"gotest.tools/v3/fs"
|
||||
+
|
||||
constant "isula.org/isula-build"
|
||||
+ "isula.org/isula-build/util"
|
||||
)
|
||||
|
||||
func TestLoadCmd(t *testing.T) {
|
||||
@@ -182,6 +185,8 @@ func TestResolveLoadPath(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestCheckLoadOpts(t *testing.T) {
|
||||
+ pwd, err := os.Getwd()
|
||||
+ assert.NilError(t, err)
|
||||
root := fs.NewDir(t, t.Name())
|
||||
defer root.Remove()
|
||||
emptyFile, err := os.Create(filepath.Join(root.Path(), "empty.tar"))
|
||||
@@ -201,9 +206,10 @@ func TestCheckLoadOpts(t *testing.T) {
|
||||
sep separatorLoadOption
|
||||
}
|
||||
tests := []struct {
|
||||
- name string
|
||||
- fields fields
|
||||
- wantErr bool
|
||||
+ name string
|
||||
+ fields fields
|
||||
+ wantErr bool
|
||||
+ errMessage string
|
||||
}{
|
||||
{
|
||||
name: "TC-normal load options",
|
||||
@@ -212,15 +218,17 @@ func TestCheckLoadOpts(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
- name: "TC-empty load path",
|
||||
- wantErr: true,
|
||||
+ name: "TC-empty load path",
|
||||
+ wantErr: true,
|
||||
+ errMessage: "tarball path should not be empty",
|
||||
},
|
||||
{
|
||||
name: "TC-empty load file",
|
||||
fields: fields{
|
||||
path: emptyFile.Name(),
|
||||
},
|
||||
- wantErr: true,
|
||||
+ wantErr: true,
|
||||
+ errMessage: "loading file is empty",
|
||||
},
|
||||
{
|
||||
name: "TC-separated load",
|
||||
@@ -243,7 +251,8 @@ func TestCheckLoadOpts(t *testing.T) {
|
||||
lib: libFile.Name(),
|
||||
},
|
||||
},
|
||||
- wantErr: true,
|
||||
+ wantErr: true,
|
||||
+ errMessage: "app image name(-i) should not be empty",
|
||||
},
|
||||
{
|
||||
name: "TC-separated load with empty dir",
|
||||
@@ -254,7 +263,8 @@ func TestCheckLoadOpts(t *testing.T) {
|
||||
lib: libFile.Name(),
|
||||
},
|
||||
},
|
||||
- wantErr: true,
|
||||
+ wantErr: true,
|
||||
+ errMessage: "image tarball directory should not be empty",
|
||||
},
|
||||
{
|
||||
name: "TC-separated load with invalid app name",
|
||||
@@ -266,7 +276,8 @@ func TestCheckLoadOpts(t *testing.T) {
|
||||
lib: libFile.Name(),
|
||||
},
|
||||
},
|
||||
- wantErr: true,
|
||||
+ wantErr: true,
|
||||
+ errMessage: fmt.Sprintf("invalid image name: %s", "invalid:app:name"),
|
||||
},
|
||||
{
|
||||
name: "TC-separated load with empty base tarball",
|
||||
@@ -278,7 +289,8 @@ func TestCheckLoadOpts(t *testing.T) {
|
||||
lib: libFile.Name(),
|
||||
},
|
||||
},
|
||||
- wantErr: true,
|
||||
+ wantErr: true,
|
||||
+ errMessage: "resolve base tarball path failed: loading file is empty",
|
||||
},
|
||||
{
|
||||
name: "TC-separated load with empty lib tarball",
|
||||
@@ -290,7 +302,8 @@ func TestCheckLoadOpts(t *testing.T) {
|
||||
lib: emptyFile.Name(),
|
||||
},
|
||||
},
|
||||
- wantErr: true,
|
||||
+ wantErr: true,
|
||||
+ errMessage: "resolve lib tarball path failed: loading file is empty",
|
||||
},
|
||||
{
|
||||
name: "TC-separated load with same base and lib tarball",
|
||||
@@ -302,7 +315,8 @@ func TestCheckLoadOpts(t *testing.T) {
|
||||
lib: fileWithContent.Name(),
|
||||
},
|
||||
},
|
||||
- wantErr: true,
|
||||
+ wantErr: true,
|
||||
+ errMessage: "base and lib tarballs are the same",
|
||||
},
|
||||
{
|
||||
name: "TC-separated load with dir not exist",
|
||||
@@ -314,7 +328,20 @@ func TestCheckLoadOpts(t *testing.T) {
|
||||
lib: libFile.Name(),
|
||||
},
|
||||
},
|
||||
- wantErr: true,
|
||||
+ wantErr: true,
|
||||
+ errMessage: fmt.Sprintf("image tarball directory %q is not exist", util.MakeAbsolute("path not exist", pwd)),
|
||||
+ },
|
||||
+ {
|
||||
+			// if base and lib dir are both not provided, the daemon side will read
+			// the info from the "manifest" file in the dest dir automatically,
+			// so no error is returned here
|
||||
+ name: "TC-base and lib dir both not provided",
|
||||
+ fields: fields{
|
||||
+ path: "app:latest",
|
||||
+ sep: separatorLoadOption{
|
||||
+ dir: root.Path(),
|
||||
+ },
|
||||
+ },
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
@@ -324,9 +351,13 @@ func TestCheckLoadOpts(t *testing.T) {
|
||||
loadID: tt.fields.loadID,
|
||||
sep: tt.fields.sep,
|
||||
}
|
||||
- if err := opt.checkLoadOpts(); (err != nil) != tt.wantErr {
|
||||
+ err := opt.checkLoadOpts()
|
||||
+ if (err != nil) != tt.wantErr {
|
||||
t.Errorf("loadOptions.checkLoadOpts() error = %v, wantErr %v", err, tt.wantErr)
|
||||
}
|
||||
+ if err != nil && err.Error() != tt.errMessage {
|
||||
+ t.Errorf("loadOptions.checkLoadOpts() error = %v, wantErr %v", err, tt.errMessage)
|
||||
+ }
|
||||
})
|
||||
}
|
||||
}
|
||||
--
|
||||
2.27.0
|
||||
|
||||
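The fallback that the bugfix above leans on is the daemon-side manifest lookup from the earlier separator patch: when the client omits -b and -l, the tarball names recorded at save time are resolved from the manifest in the tarball directory. The sketch below illustrates that lookup; the tarballEntry fields, their JSON tags and the literal "manifest" file name are assumptions for illustration, while the real code uses the daemon's tarballInfo type and util.LoadJSONFile.

package main

import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
)

// tarballEntry is a cut-down, assumed shape of one manifest entry.
type tarballEntry struct {
	BaseTarName string `json:"baseTarName"`
	LibTarName  string `json:"libTarName"`
}

// resolveBase prefers a client-supplied -b path; otherwise it falls back to
// the base tarball name recorded in the manifest of the tarball directory.
func resolveBase(dir, appName, baseFlag string) (string, error) {
	if baseFlag != "" {
		return baseFlag, nil
	}
	data, err := os.ReadFile(filepath.Join(dir, "manifest"))
	if err != nil {
		return "", err
	}
	entries := make(map[string]tarballEntry)
	if err := json.Unmarshal(data, &entries); err != nil {
		return "", err
	}
	entry, ok := entries[appName]
	if !ok || entry.BaseTarName == "" {
		return "", fmt.Errorf("no base tarball recorded for %s", appName)
	}
	return filepath.Join(dir, entry.BaseTarName), nil
}

func main() {
	base, err := resolveBase("/home/Images", "app:latest", "")
	fmt.Println(base, err)
}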
674
patch/0080-util-add-unit-test-for-file.go.patch
Normal file
@@ -0,0 +1,674 @@
|
||||
From d578f50d5ec200a7af83186b282a22cceb927f1b Mon Sep 17 00:00:00 2001
|
||||
From: DCCooper <1866858@gmail.com>
|
||||
Date: Thu, 28 Oct 2021 22:41:18 +0800
|
||||
Subject: [PATCH 08/16] util: add unit test for file.go
|
||||
|
||||
Signed-off-by: DCCooper <1866858@gmail.com>
|
||||
---
|
||||
util/file.go | 42 +++-
|
||||
util/file_test.go | 547 ++++++++++++++++++++++++++++++++++++++++++++++
|
||||
2 files changed, 578 insertions(+), 11 deletions(-)
|
||||
create mode 100644 util/file_test.go
|
||||
|
||||
diff --git a/util/file.go b/util/file.go
|
||||
index cd4a75d5..e0353898 100644
|
||||
--- a/util/file.go
|
||||
+++ b/util/file.go
|
||||
@@ -29,12 +29,26 @@ const (
|
||||
fileMaxSize = 10 * 1024 * 1024 // 10MB
|
||||
)
|
||||
|
||||
+var (
|
||||
+ modifyTime = time.Date(2017, time.January, 0, 0, 0, 0, 0, time.UTC)
|
||||
+ accessTime = time.Date(2017, time.January, 0, 0, 0, 0, 0, time.UTC)
|
||||
+)
|
||||
+
|
||||
// ReadSmallFile read small file less than 10MB
|
||||
func ReadSmallFile(path string) ([]byte, error) {
|
||||
st, err := os.Lstat(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
+
|
||||
+ if !st.Mode().IsRegular() {
|
||||
+ return nil, errors.Errorf("loading file %s should be a regular file", st.Name())
|
||||
+ }
|
||||
+
|
||||
+ if st.Size() == 0 {
|
||||
+ return nil, errors.New("loading file is empty")
|
||||
+ }
|
||||
+
|
||||
if st.Size() > fileMaxSize {
|
||||
return nil, errors.Errorf("file %q too big", path)
|
||||
}
|
||||
@@ -51,18 +65,18 @@ func LoadJSONFile(file string, v interface{}) error {
|
||||
}
|
||||
|
||||
// ChangeDirModifyTime changes modify time of directory
|
||||
-func ChangeDirModifyTime(dir string) error {
|
||||
+func ChangeDirModifyTime(dir string, accessTime, modifyTime time.Time) error {
|
||||
fs, rErr := ioutil.ReadDir(dir)
|
||||
if rErr != nil {
|
||||
return rErr
|
||||
}
|
||||
for _, f := range fs {
|
||||
src := filepath.Join(dir, f.Name())
|
||||
- if err := ChangeFileModifyTime(src); err != nil {
|
||||
+ if err := ChangeFileModifyTime(src, accessTime, modifyTime); err != nil {
|
||||
return err
|
||||
}
|
||||
if f.IsDir() {
|
||||
- if err := ChangeDirModifyTime(src); err != nil {
|
||||
+ if err := ChangeDirModifyTime(src, accessTime, modifyTime); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
@@ -71,13 +85,11 @@ func ChangeDirModifyTime(dir string) error {
|
||||
}
|
||||
|
||||
// ChangeFileModifyTime changes modify time of file by fixing time at 2017-01-01 00:00:00
|
||||
-func ChangeFileModifyTime(path string) error {
|
||||
- mtime := time.Date(2017, time.January, 0, 0, 0, 0, 0, time.UTC)
|
||||
- atime := time.Date(2017, time.January, 0, 0, 0, 0, 0, time.UTC)
|
||||
+func ChangeFileModifyTime(path string, accessTime, modifyTime time.Time) error {
|
||||
if _, err := os.Lstat(path); err != nil {
|
||||
return err
|
||||
}
|
||||
- if err := os.Chtimes(path, atime, mtime); err != nil {
|
||||
+ if err := os.Chtimes(path, accessTime, modifyTime); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
@@ -87,9 +99,9 @@ func ChangeFileModifyTime(path string) error {
|
||||
// by using different compression method defined by "com"
|
||||
// the files' modify time attribute will be set to a fix time "2017-01-01 00:00:00"
|
||||
// if set "modifyTime" to true
|
||||
-func PackFiles(src, dest string, com archive.Compression, modifyTime bool) (err error) {
|
||||
- if modifyTime {
|
||||
- if err = ChangeDirModifyTime(src); err != nil {
|
||||
+func PackFiles(src, dest string, com archive.Compression, needModifyTime bool) (err error) {
|
||||
+ if needModifyTime {
|
||||
+ if err = ChangeDirModifyTime(src, accessTime, modifyTime); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
@@ -122,6 +134,14 @@ func PackFiles(src, dest string, com archive.Compression, modifyTime bool) (err
|
||||
// by using different compression method defined by "com"
|
||||
// The src file will be remove if set "rm" to true
|
||||
func UnpackFile(src, dest string, com archive.Compression, rm bool) (err error) {
|
||||
+ if len(dest) == 0 {
|
||||
+ return errors.New("unpack: dest path should not be empty")
|
||||
+ }
|
||||
+ d, err := os.Stat(dest)
|
||||
+ if err != nil || !d.IsDir() {
|
||||
+ return errors.Wrapf(err, "unpack: invalid dest path")
|
||||
+ }
|
||||
+
|
||||
cleanPath := filepath.Clean(src)
|
||||
f, err := os.Open(cleanPath) // nolint:gosec
|
||||
if err != nil {
|
||||
@@ -139,7 +159,7 @@ func UnpackFile(src, dest string, com archive.Compression, rm bool) (err error)
|
||||
return errors.Wrapf(err, "unpack file %q failed", src)
|
||||
}
|
||||
|
||||
- if err = ChangeDirModifyTime(dest); err != nil {
|
||||
+ if err = ChangeDirModifyTime(dest, modifyTime, accessTime); err != nil {
|
||||
return errors.Wrapf(err, "change modify time for directory %q failed", dest)
|
||||
}
|
||||
|
||||
diff --git a/util/file_test.go b/util/file_test.go
|
||||
new file mode 100644
|
||||
index 00000000..09aed41d
|
||||
--- /dev/null
|
||||
+++ b/util/file_test.go
|
||||
@@ -0,0 +1,547 @@
|
||||
+// Copyright (c) Huawei Technologies Co., Ltd. 2020. All rights reserved.
|
||||
+// isula-build licensed under the Mulan PSL v2.
|
||||
+// You can use this software according to the terms and conditions of the Mulan PSL v2.
|
||||
+// You may obtain a copy of Mulan PSL v2 at:
|
||||
+// http://license.coscl.org.cn/MulanPSL2
|
||||
+// THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
|
||||
+// IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
|
||||
+// PURPOSE.
|
||||
+// See the Mulan PSL v2 for more details.
|
||||
+// Author: Xiang Li
|
||||
+// Create: 2021-08-24
|
||||
+// Description: file manipulation related common functions
|
||||
+
|
||||
+package util
|
||||
+
|
||||
+import (
|
||||
+ "encoding/json"
|
||||
+ "io/ioutil"
|
||||
+ "os"
|
||||
+ "path/filepath"
|
||||
+ "reflect"
|
||||
+ "testing"
|
||||
+ "time"
|
||||
+
|
||||
+ "github.com/containers/storage/pkg/archive"
|
||||
+ "gotest.tools/v3/assert"
|
||||
+ "gotest.tools/v3/fs"
|
||||
+ constant "isula.org/isula-build"
|
||||
+)
|
||||
+
|
||||
+func TestReadSmallFile(t *testing.T) {
|
||||
+ smallFile := fs.NewFile(t, t.Name())
|
||||
+ defer smallFile.Remove()
|
||||
+ err := ioutil.WriteFile(smallFile.Path(), []byte("small file"), constant.DefaultRootFileMode)
|
||||
+ assert.NilError(t, err)
|
||||
+
|
||||
+ root := fs.NewDir(t, t.Name())
|
||||
+ defer root.Remove()
|
||||
+
|
||||
+ bigFile := filepath.Join(root.Path(), "bigFile")
|
||||
+ f, err := os.Create(bigFile)
|
||||
+ assert.NilError(t, err)
|
||||
+ defer os.Remove(f.Name())
|
||||
+ err = f.Truncate(fileMaxSize + 1)
|
||||
+ assert.NilError(t, err)
|
||||
+
|
||||
+ emptyFile := fs.NewFile(t, t.Name())
|
||||
+ defer emptyFile.Remove()
|
||||
+
|
||||
+ type args struct {
|
||||
+ path string
|
||||
+ }
|
||||
+ tests := []struct {
|
||||
+ name string
|
||||
+ args args
|
||||
+ want []byte
|
||||
+ wantErr bool
|
||||
+ }{
|
||||
+ {
|
||||
+ name: "TC-normal read",
|
||||
+ args: args{path: smallFile.Path()},
|
||||
+ want: []byte("small file"),
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-not exist path",
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-file too big",
|
||||
+ args: args{path: bigFile},
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-empty file",
|
||||
+ args: args{path: emptyFile.Path()},
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-invalid file",
|
||||
+ args: args{path: "/dev/cdrom"},
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ }
|
||||
+ for _, tt := range tests {
|
||||
+ t.Run(tt.name, func(t *testing.T) {
|
||||
+ got, err := ReadSmallFile(tt.args.path)
|
||||
+ if (err != nil) != tt.wantErr {
|
||||
+ t.Errorf("ReadSmallFile() error = %v, wantErr %v", err, tt.wantErr)
|
||||
+ return
|
||||
+ }
|
||||
+ if !reflect.DeepEqual(got, tt.want) {
|
||||
+ t.Errorf("ReadSmallFile() = %v, want %v", got, tt.want)
|
||||
+ }
|
||||
+ })
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+func TestLoadJSONFile(t *testing.T) {
|
||||
+ type rename struct {
|
||||
+ Name string `json:"name"`
|
||||
+ Rename string `json:"rename"`
|
||||
+ }
|
||||
+ type args struct {
|
||||
+ file string
|
||||
+ v rename
|
||||
+ }
|
||||
+
|
||||
+ smallJSONFile := fs.NewFile(t, t.Name())
|
||||
+ defer smallJSONFile.Remove()
|
||||
+ validData := rename{
|
||||
+ Name: "origin name",
|
||||
+ Rename: "modified name",
|
||||
+ }
|
||||
+ b, err := json.Marshal(validData)
|
||||
+ assert.NilError(t, err)
|
||||
+ ioutil.WriteFile(smallJSONFile.Path(), b, constant.DefaultRootFileMode)
|
||||
+
|
||||
+ tests := []struct {
|
||||
+ name string
|
||||
+ args args
|
||||
+ wantKey string
|
||||
+ wantValue string
|
||||
+ wantErr bool
|
||||
+ }{
|
||||
+ {
|
||||
+ name: "TC-normal json file",
|
||||
+ args: args{
|
||||
+ file: smallJSONFile.Path(),
|
||||
+ v: rename{},
|
||||
+ },
|
||||
+ wantKey: "origin name",
|
||||
+ wantValue: "modified name",
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-json file not exist",
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ }
|
||||
+ for _, tt := range tests {
|
||||
+ t.Run(tt.name, func(t *testing.T) {
|
||||
+ if err := LoadJSONFile(tt.args.file, &tt.args.v); (err != nil) != tt.wantErr {
|
||||
+ t.Errorf("LoadJSONFile() error = %v, wantErr %v", err, tt.wantErr)
|
||||
+ }
|
||||
+ if err == nil {
|
||||
+ assert.Equal(t, tt.args.v.Name, tt.wantKey)
|
||||
+ assert.Equal(t, tt.args.v.Rename, tt.wantValue)
|
||||
+ }
|
||||
+ })
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+func TestChangeFileModifyTime(t *testing.T) {
|
||||
+ normalFile := fs.NewFile(t, t.Name())
|
||||
+ defer normalFile.Remove()
|
||||
+
|
||||
+ pwd, err := os.Getwd()
|
||||
+ assert.NilError(t, err)
|
||||
+ immutableFile := filepath.Join(pwd, "immutableFile")
|
||||
+ _, err = os.Create(immutableFile)
|
||||
+ defer os.Remove(immutableFile)
|
||||
+
|
||||
+ type args struct {
|
||||
+ path string
|
||||
+ mtime time.Time
|
||||
+ atime time.Time
|
||||
+ }
|
||||
+ tests := []struct {
|
||||
+ name string
|
||||
+ args args
|
||||
+ wantErr bool
|
||||
+ needHook bool
|
||||
+ preHookFun func(t *testing.T)
|
||||
+ postHookFun func(t *testing.T)
|
||||
+ }{
|
||||
+ {
|
||||
+ name: "TC-change file modify time",
|
||||
+ args: args{
|
||||
+ path: immutableFile,
|
||||
+ mtime: modifyTime,
|
||||
+ atime: accessTime,
|
||||
+ },
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-file path empty",
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+			name:    "TC-lack of permission",
|
||||
+ args: args{
|
||||
+ path: immutableFile,
|
||||
+ atime: accessTime,
|
||||
+ mtime: modifyTime,
|
||||
+ },
|
||||
+ needHook: true,
|
||||
+ preHookFun: func(t *testing.T) { Immutable(immutableFile, true) },
|
||||
+ postHookFun: func(t *testing.T) { Immutable(immutableFile, false) },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ }
|
||||
+ for _, tt := range tests {
|
||||
+ t.Run(tt.name, func(t *testing.T) {
|
||||
+ if tt.needHook {
|
||||
+ tt.preHookFun(t)
|
||||
+ }
|
||||
+ err := ChangeFileModifyTime(tt.args.path, tt.args.atime, tt.args.mtime)
|
||||
+ if tt.needHook {
|
||||
+ defer tt.postHookFun(t)
|
||||
+ }
|
||||
+ if (err != nil) != tt.wantErr {
|
||||
+ t.Errorf("ChangeFileModifyTime() error = %v, wantErr %v", err, tt.wantErr)
|
||||
+ }
|
||||
+ if err == nil {
|
||||
+ f, err := os.Stat(tt.args.path)
|
||||
+ assert.NilError(t, err)
|
||||
+ assert.Equal(t, true, f.ModTime().Equal(modifyTime))
|
||||
+ }
|
||||
+ })
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+type tempDirs struct {
|
||||
+ root string
|
||||
+ subDir1 string
|
||||
+ subDir11 string
|
||||
+ file1 string
|
||||
+ file11 string
|
||||
+}
|
||||
+
|
||||
+func createDirs(t *testing.T) tempDirs {
|
||||
+ pwd, err := os.Getwd()
|
||||
+ assert.NilError(t, err)
|
||||
+
|
||||
+ root := filepath.Join(pwd, t.Name())
|
||||
+ assert.NilError(t, os.Mkdir(root, constant.DefaultRootDirMode))
|
||||
+
|
||||
+ rootSubDir1 := filepath.Join(root, "rootSubDir1")
|
||||
+ assert.NilError(t, os.Mkdir(rootSubDir1, constant.DefaultRootDirMode))
|
||||
+
|
||||
+ rootSubDir11 := filepath.Join(rootSubDir1, "rootSubDir11")
|
||||
+ assert.NilError(t, os.Mkdir(rootSubDir11, constant.DefaultRootDirMode))
|
||||
+
|
||||
+ file1 := filepath.Join(rootSubDir1, "file1")
|
||||
+ _, err = os.Create(file1)
|
||||
+ assert.NilError(t, err)
|
||||
+
|
||||
+ file11 := filepath.Join(rootSubDir11, "file11")
|
||||
+ _, err = os.Create(file11)
|
||||
+ assert.NilError(t, err)
|
||||
+
|
||||
+ return tempDirs{
|
||||
+ root: root,
|
||||
+ subDir1: rootSubDir1,
|
||||
+ subDir11: rootSubDir11,
|
||||
+ file1: file1,
|
||||
+ file11: file11,
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+func (tmp *tempDirs) removeAll(t *testing.T) {
|
||||
+ assert.NilError(t, os.RemoveAll(tmp.root))
|
||||
+ assert.NilError(t, os.RemoveAll(tmp.subDir1))
|
||||
+ assert.NilError(t, os.RemoveAll(tmp.subDir11))
|
||||
+ assert.NilError(t, os.RemoveAll(tmp.file1))
|
||||
+ assert.NilError(t, os.RemoveAll(tmp.file11))
|
||||
+}
|
||||
+
|
||||
+func TestChangeDirModifyTime(t *testing.T) {
|
||||
+ tempDirs := createDirs(t)
|
||||
+ defer tempDirs.removeAll(t)
|
||||
+ root := tempDirs.root
|
||||
+
|
||||
+ type args struct {
|
||||
+ dir string
|
||||
+ mtime time.Time
|
||||
+ atime time.Time
|
||||
+ }
|
||||
+ tests := []struct {
|
||||
+ name string
|
||||
+ args args
|
||||
+ wantErr bool
|
||||
+ needPreHook bool
|
||||
+ needPostHook bool
|
||||
+ preWalkFun func(path string, info os.FileInfo, err error) error
|
||||
+ postWalkFun func(path string, info os.FileInfo, err error) error
|
||||
+ }{
|
||||
+ {
|
||||
+ name: "TC-normal case modify directory",
|
||||
+ args: args{
|
||||
+ dir: root,
|
||||
+ mtime: modifyTime,
|
||||
+ atime: accessTime,
|
||||
+ },
|
||||
+ needPostHook: true,
|
||||
+ postWalkFun: func(path string, info os.FileInfo, err error) error {
|
||||
+ assert.Assert(t, true, info.ModTime().Equal(modifyTime))
|
||||
+ return nil
|
||||
+ },
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-empty path",
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-lack of permission",
|
||||
+ args: args{
|
||||
+ dir: root,
|
||||
+ mtime: modifyTime,
|
||||
+ atime: accessTime,
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ needPreHook: true,
|
||||
+ needPostHook: true,
|
||||
+ preWalkFun: func(path string, info os.FileInfo, err error) error {
|
||||
+ if !info.IsDir() {
|
||||
+ Immutable(path, true)
|
||||
+ }
|
||||
+ return nil
|
||||
+ },
|
||||
+ postWalkFun: func(path string, info os.FileInfo, err error) error {
|
||||
+ if !info.IsDir() {
|
||||
+ Immutable(path, false)
|
||||
+ }
|
||||
+ return nil
|
||||
+ },
|
||||
+ },
|
||||
+ }
|
||||
+ for _, tt := range tests {
|
||||
+ t.Run(tt.name, func(t *testing.T) {
|
||||
+ if tt.needPreHook {
|
||||
+ wErr := filepath.Walk(tt.args.dir, tt.preWalkFun)
|
||||
+ assert.NilError(t, wErr)
|
||||
+ }
|
||||
+ err := ChangeDirModifyTime(tt.args.dir, tt.args.mtime, tt.args.atime)
|
||||
+ if (err != nil) != tt.wantErr {
|
||||
+ t.Errorf("ChangeDirModifyTime() error = %v, wantErr %v", err, tt.wantErr)
|
||||
+ }
|
||||
+ if tt.needPostHook {
|
||||
+ wErr := filepath.Walk(tt.args.dir, tt.postWalkFun)
|
||||
+ assert.NilError(t, wErr)
|
||||
+ }
|
||||
+ })
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+func TestPackFiles(t *testing.T) {
|
||||
+ dirs := createDirs(t)
|
||||
+ defer dirs.removeAll(t)
|
||||
+ dest := fs.NewFile(t, t.Name())
|
||||
+ defer dest.Remove()
|
||||
+
|
||||
+ type args struct {
|
||||
+ src string
|
||||
+ dest string
|
||||
+ com archive.Compression
|
||||
+ needModifyTime bool
|
||||
+ }
|
||||
+ tests := []struct {
|
||||
+ name string
|
||||
+ args args
|
||||
+ wantErr bool
|
||||
+ needPreHook bool
|
||||
+ needPostHook bool
|
||||
+ preWalkFun func(path string, info os.FileInfo, err error) error
|
||||
+ postWalkFun func(path string, info os.FileInfo, err error) error
|
||||
+ }{
|
||||
+ {
|
||||
+ name: "TC-normal pack",
|
||||
+ args: args{
|
||||
+ src: dirs.root,
|
||||
+ dest: dest.Path(),
|
||||
+ com: archive.Gzip,
|
||||
+ needModifyTime: true,
|
||||
+ },
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-empty dest",
|
||||
+ args: args{
|
||||
+ src: dirs.root,
|
||||
+ com: archive.Gzip,
|
||||
+ needModifyTime: true,
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-invalid compression",
|
||||
+ args: args{
|
||||
+ src: dirs.root,
|
||||
+ dest: dest.Path(),
|
||||
+ com: archive.Compression(-1),
|
||||
+ needModifyTime: true,
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "TC-lack of permission",
|
||||
+ args: args{
|
||||
+ src: dirs.root,
|
||||
+ dest: dest.Path(),
|
||||
+ com: archive.Gzip,
|
||||
+ needModifyTime: true,
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ needPreHook: true,
|
||||
+ needPostHook: true,
|
||||
+ preWalkFun: func(path string, info os.FileInfo, err error) error {
|
||||
+ if !info.IsDir() {
|
||||
+ Immutable(path, true)
|
||||
+ }
|
||||
+ return nil
|
||||
+ },
|
||||
+ postWalkFun: func(path string, info os.FileInfo, err error) error {
|
||||
+ if !info.IsDir() {
|
||||
+ Immutable(path, false)
|
||||
+ }
|
||||
+ return nil
|
||||
+ },
|
||||
+ },
|
||||
+ }
|
||||
+ for _, tt := range tests {
|
||||
+ t.Run(tt.name, func(t *testing.T) {
|
||||
+ if tt.needPreHook {
|
||||
+ wErr := filepath.Walk(tt.args.src, tt.preWalkFun)
|
||||
+ assert.NilError(t, wErr)
|
||||
+ }
|
||||
+ if err := PackFiles(tt.args.src, tt.args.dest, tt.args.com, tt.args.needModifyTime); (err != nil) != tt.wantErr {
|
||||
+ t.Errorf("PackFiles() error = %v, wantErr %v", err, tt.wantErr)
|
||||
+ }
|
||||
+ if tt.needPostHook {
|
||||
+ wErr := filepath.Walk(tt.args.src, tt.postWalkFun)
|
||||
+ assert.NilError(t, wErr)
|
||||
+ }
|
||||
+ })
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+func TestUnpackFile(t *testing.T) {
|
||||
+ folderToBePacked := createDirs(t)
|
||||
+ defer folderToBePacked.removeAll(t)
|
||||
+ pwd, err := os.Getwd()
|
||||
+ assert.NilError(t, err)
|
||||
+
|
||||
+ tarName := filepath.Join(pwd, "test.tar")
|
||||
+ assert.NilError(t, PackFiles(folderToBePacked.root, tarName, archive.Gzip, true))
|
||||
+ defer os.RemoveAll(tarName)
|
||||
+
|
||||
+ invalidTar := filepath.Join(pwd, "invalid.tar")
|
||||
+ err = ioutil.WriteFile(invalidTar, []byte("invalid tar"), constant.DefaultRootFileMode)
|
||||
+ assert.NilError(t, err)
|
||||
+ defer os.RemoveAll(invalidTar)
|
||||
+
|
||||
+ unpackDest := filepath.Join(pwd, "unpack")
|
||||
+ assert.NilError(t, os.MkdirAll(unpackDest, constant.DefaultRootDirMode))
|
||||
+ defer os.RemoveAll(unpackDest)
|
||||
+
|
||||
+ type args struct {
|
||||
+ src string
|
||||
+ dest string
|
||||
+ com archive.Compression
|
||||
+ rm bool
|
||||
+ }
|
||||
+ tests := []struct {
|
||||
+ name string
|
||||
+ args args
|
||||
+ needPreHook bool
|
||||
+ needPostHook bool
|
||||
+ wantErr bool
|
||||
+ }{
|
||||
+ {
|
||||
+ name: "normal unpack file",
|
||||
+ args: args{
|
||||
+ src: tarName,
|
||||
+ dest: unpackDest,
|
||||
+ com: archive.Gzip,
|
||||
+ rm: true,
|
||||
+ },
|
||||
+ },
|
||||
+ {
|
||||
+ name: "empty unpack destination path",
|
||||
+ args: args{
|
||||
+ src: tarName,
|
||||
+ com: archive.Gzip,
|
||||
+ rm: false,
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "unpack src path does not exist",
|
||||
+ args: args{
|
||||
+ src: "path not exist",
|
||||
+ dest: unpackDest,
|
||||
+ com: archive.Gzip,
|
||||
+ rm: false,
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "unpack destination path does not exist",
|
||||
+ args: args{
|
||||
+ src: tarName,
|
||||
+ dest: "path not exist",
|
||||
+ com: archive.Gzip,
|
||||
+ rm: false,
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "invalid tarball",
|
||||
+ args: args{
|
||||
+ src: invalidTar,
|
||||
+ dest: unpackDest,
|
||||
+ com: archive.Gzip,
|
||||
+ rm: false,
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ },
|
||||
+ {
|
||||
+ name: "no permission for src",
|
||||
+ args: args{
|
||||
+ src: tarName,
|
||||
+ dest: unpackDest,
|
||||
+ com: archive.Gzip,
|
||||
+ rm: true,
|
||||
+ },
|
||||
+ wantErr: true,
|
||||
+ needPreHook: true,
|
||||
+ needPostHook: true,
|
||||
+ },
|
||||
+ }
|
||||
+ for _, tt := range tests {
|
||||
+ t.Run(tt.name, func(t *testing.T) {
|
||||
+ if tt.needPreHook {
|
||||
+ assert.NilError(t, Immutable(tt.args.src, true))
|
||||
+ }
|
||||
+ err := UnpackFile(tt.args.src, tt.args.dest, tt.args.com, tt.args.rm)
|
||||
+ if (err != nil) != tt.wantErr {
|
||||
+ t.Errorf("UnpackFile() error = %v, wantErr %v", err, tt.wantErr)
|
||||
+ }
|
||||
+ if tt.needPostHook {
|
||||
+ assert.NilError(t, Immutable(tt.args.src, false))
|
||||
+ }
|
||||
+ if tt.args.rm && err == nil {
|
||||
+ tarName := filepath.Join(pwd, "test.tar")
|
||||
+ assert.NilError(t, PackFiles(folderToBePacked.root, tarName, archive.Gzip, true))
|
||||
+ }
|
||||
+ })
|
||||
+ }
|
||||
+}
|
||||
--
|
||||
2.27.0
|
||||
|
||||
391
patch/0081-test-cleancode-test-for-better-experience.patch
Normal file
@@ -0,0 +1,391 @@
|
||||
From ed8d2d30e7d298fa05395a79cc3502240d9c0721 Mon Sep 17 00:00:00 2001
|
||||
From: DCCooper <1866858@gmail.com>
|
||||
Date: Mon, 1 Nov 2021 16:24:14 +0800
|
||||
Subject: [PATCH 13/16] test:cleancode test for better experience
|
||||
|
||||
change:
|
||||
1. shellcheck fixes for scripts used in hack
|
||||
2. use busybox instead of the openeuler base image to speed up tests
|
||||
3. add test-unit-cover, test-sdv-cover and test-cover targets to
|
||||
generate coverage files
|
||||
|
||||
Signed-off-by: DCCooper <1866858@gmail.com>
|
||||
---
|
||||
Makefile | 18 ++++
|
||||
hack/all_coverage.sh | 26 ++++++
|
||||
hack/merge_coverage.sh | 55 +++++++++++
|
||||
hack/sdv_coverage.sh | 104 +++++++++++++++++++++
|
||||
hack/unit_test.sh | 82 +++++++++++-----
|
||||
tests/src/integration_test_set_new_root.sh | 7 +-
|
||||
6 files changed, 268 insertions(+), 24 deletions(-)
|
||||
create mode 100755 hack/all_coverage.sh
|
||||
create mode 100644 hack/merge_coverage.sh
|
||||
create mode 100755 hack/sdv_coverage.sh
|
||||
|
||||
diff --git a/Makefile b/Makefile
|
||||
index d41a9fdb..73482a41 100644
|
||||
--- a/Makefile
|
||||
+++ b/Makefile
|
||||
@@ -112,6 +112,24 @@ test-integration: ## Test integration case
|
||||
@./tests/test.sh integration
|
||||
@echo "Integration test done!"
|
||||
|
||||
+.PHONY: test-unit-cover
|
||||
+test-unit-cover: ## Test unit case and generate coverage
|
||||
+ @echo "Unit test cover starting..."
|
||||
+ @./hack/unit_test.sh cover
|
||||
+ @echo "Unit test cover done!"
|
||||
+
|
||||
+.PHONY: test-sdv-cover
|
||||
+test-sdv-cover: ## Test integration case and generate coverage
|
||||
+ @echo "Integration test cover starting..."
|
||||
+ @./hack/sdv_coverage.sh
|
||||
+ @echo "Integration test cover done!"
|
||||
+
|
||||
+.PHONY: test-cover
|
||||
+test-cover: test-sdv-cover test-unit-cover ## Test both unit and sdv case and generate unity coverage
|
||||
+ @echo "Test cover starting..."
|
||||
+ @./hack/all_coverage.sh
|
||||
+ @echo "Test cover done!"
|
||||
+
|
||||
##@ Development
|
||||
|
||||
.PHONY: build-image
|
||||
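As a quick orientation for the targets added above, the intended entry points are shown below (a sketch; it assumes the repository root as the working directory and no isula-builder daemon already running):

    make test-unit-cover   # unit tests with per-package coverage, merged by hack/unit_test.sh
    make test-sdv-cover    # integration (sdv) coverage via hack/sdv_coverage.sh
    make test-cover        # run both, then hack/all_coverage.sh merges the two reports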
diff --git a/hack/all_coverage.sh b/hack/all_coverage.sh
|
||||
new file mode 100755
|
||||
index 00000000..9f9eb5ff
|
||||
--- /dev/null
|
||||
+++ b/hack/all_coverage.sh
|
||||
@@ -0,0 +1,26 @@
|
||||
+#!/bin/bash
|
||||
+
|
||||
+# Copyright (c) Huawei Technologies Co., Ltd. 2020. All rights reserved.
|
||||
+# isula-build licensed under the Mulan PSL v2.
|
||||
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
|
||||
+# You may obtain a copy of Mulan PSL v2 at:
|
||||
+# http://license.coscl.org.cn/MulanPSL2
|
||||
+# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
|
||||
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
|
||||
+# PURPOSE.
|
||||
+# See the Mulan PSL v2 for more details.
|
||||
+# Author: Xiang Li
|
||||
+# Create: 2020-03-01
|
||||
+# Description: shell script for all coverage
|
||||
+# Note: use this file by typing make test-cover
|
||||
+# Do not run this script directly
|
||||
+
|
||||
+SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" || exit; pwd)
|
||||
+# shellcheck disable=SC1091
|
||||
+source "${SCRIPT_DIR}"/merge_coverage.sh
|
||||
+
|
||||
+unit_coverage=${PWD}/cover_unit_test_all.out
|
||||
+sdv_coverage=${PWD}/cover_sdv_test_all.out
|
||||
+output_file=${PWD}/cover_test_all
|
||||
+
|
||||
+merge_cover "${output_file}" "${sdv_coverage}" "${unit_coverage}"
|
||||
diff --git a/hack/merge_coverage.sh b/hack/merge_coverage.sh
|
||||
new file mode 100644
|
||||
index 00000000..6e529a34
|
||||
--- /dev/null
|
||||
+++ b/hack/merge_coverage.sh
|
||||
@@ -0,0 +1,55 @@
|
||||
+#!/bin/bash
|
||||
+
|
||||
+# Copyright (c) Huawei Technologies Co., Ltd. 2020. All rights reserved.
|
||||
+# isula-build licensed under the Mulan PSL v2.
|
||||
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
|
||||
+# You may obtain a copy of Mulan PSL v2 at:
|
||||
+# http://license.coscl.org.cn/MulanPSL2
|
||||
+# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
|
||||
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
|
||||
+# PURPOSE.
|
||||
+# See the Mulan PSL v2 for more details.
|
||||
+# Author: Xiang Li
|
||||
+# Create: 2021-11-01
|
||||
+# Description: merge coverage from input coverage files
|
||||
+# Note: Do not run this script directly
|
||||
+
|
||||
+# Usage: merge_cover outputfile file1 file2 ... fileN
|
||||
+# Input: first: outputfile name
|
||||
+# remaining: coverage files
|
||||
+function merge_cover() {
|
||||
+ output_file_name="$1"
|
||||
+ input_coverages=( "${@:2}" )
|
||||
+
|
||||
+ output_coverage_file=${output_file_name}.out
|
||||
+ output_html_file=${output_file_name}.html
|
||||
+ output_merge_cover=${output_file_name}.merge
|
||||
+ grep -r -h -v "^mode:" "${input_coverages[@]}" | sort > "$output_merge_cover"
|
||||
+ current=""
|
||||
+ count=0
|
||||
+ echo "mode: set" > "$output_coverage_file"
|
||||
+ # read the cover report from merge_cover, convert it, write to final coverage
|
||||
+ while read -r line; do
|
||||
+ block=$(echo "$line" | cut -d ' ' -f1-2)
|
||||
+ num=$(echo "$line" | cut -d ' ' -f3)
|
||||
+ if [ "$current" == "" ]; then
|
||||
+ current=$block
|
||||
+ count=$num
|
||||
+ elif [ "$block" == "$current" ]; then
|
||||
+ count=$((count + num))
|
||||
+ else
|
||||
+ # if the two sorted lines are not in the same code block, write the accumulated count of the last code block to the final coverage
|
||||
+ echo "$current" $count >> "${output_coverage_file}"
|
||||
+ current=$block
|
||||
+ count=$num
|
||||
+ fi
|
||||
+ done < "$output_merge_cover"
|
||||
+ rm -rf "${output_merge_cover}"
|
||||
+
|
||||
+ # merge the results of last line to the final coverage
|
||||
+ if [ "$current" != "" ]; then
|
||||
+ echo "$current" "$count" >> "${output_coverage_file}"
|
||||
+ fi
|
||||
+
|
||||
+ go tool cover -html="${output_coverage_file}" -o "$output_html_file"
|
||||
+}
|
||||
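To make the merge step concrete, here is a small illustration of merge_cover with two hypothetical "mode: set" profiles covering the same block (the file path is made up; counts of identical blocks are summed, so a block hit in either run stays covered):

    # cover_unit_test_all.out: isula.org/isula-build/util/aes.go:20.2,22.16 2 1
    # cover_sdv_test_all.out:  isula.org/isula-build/util/aes.go:20.2,22.16 2 0
    merge_cover "${PWD}/cover_test_all" cover_unit_test_all.out cover_sdv_test_all.out
    # cover_test_all.out then contains: isula.org/isula-build/util/aes.go:20.2,22.16 2 1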
diff --git a/hack/sdv_coverage.sh b/hack/sdv_coverage.sh
|
||||
new file mode 100755
|
||||
index 00000000..874d9373
|
||||
--- /dev/null
|
||||
+++ b/hack/sdv_coverage.sh
|
||||
@@ -0,0 +1,104 @@
|
||||
+#!/bin/bash
|
||||
+
|
||||
+# Copyright (c) Huawei Technologies Co., Ltd. 2020. All rights reserved.
|
||||
+# isula-build licensed under the Mulan PSL v2.
|
||||
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
|
||||
+# You may obtain a copy of Mulan PSL v2 at:
|
||||
+# http://license.coscl.org.cn/MulanPSL2
|
||||
+# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
|
||||
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
|
||||
+# PURPOSE.
|
||||
+# See the Mulan PSL v2 for more details.
|
||||
+# Author: Xiang Li
|
||||
+# Create: 2020-03-01
|
||||
+# Description: shell script for coverage
|
||||
+# Note: use this file by typing make test-sdv-cover or make test-cover
|
||||
+# Do not run this script directly
|
||||
+
|
||||
+project_root=${PWD}
|
||||
+vendor_name="isula.org"
|
||||
+project_name="isula-build"
|
||||
+main_relative_path="cmd/daemon"
|
||||
+exclude_pattern="gopkgs|api/services"
|
||||
+go_test_mod_method="-mod=vendor"
|
||||
+go_test_count_method="-count=1"
|
||||
+go_test_cover_method="-covermode=set"
|
||||
+main_pkg="${vendor_name}/${project_name}/${main_relative_path}"
|
||||
+main_test_file=${project_root}/${main_relative_path}/main_test.go
|
||||
+main_file=${project_root}/${main_relative_path}/main.go
|
||||
+coverage_file=${project_root}/cover_sdv_test_all.out
|
||||
+coverage_html=${project_root}/cover_sdv_test_all.html
|
||||
+coverage_log=${project_root}/cover_sdv_test_all.log
|
||||
+main_test_binary_file=${project_root}/main.test
|
||||
+
|
||||
+function precheck() {
|
||||
+ if pgrep isula-builder > /dev/null 2>&1; then
|
||||
+ echo "isula-builder is already running, please stop it first"
|
||||
+ exit 1
|
||||
+ fi
|
||||
+}
|
||||
+
|
||||
+function modify_main_test() {
|
||||
+ # first backup file
|
||||
+ cp "${main_file}" "${main_file}".bk
|
||||
+ cp "${main_test_file}" "${main_test_file}".bk
|
||||
+ # delete Args field for main.go
|
||||
+ local comment_pattern="Args: util.NoArgs"
|
||||
+ sed -i "/$comment_pattern/s/^#*/\/\/ /" "${main_file}"
|
||||
+ # add new line for main_test.go
|
||||
+ code_snippet="func TestMain(t *testing.T) { main() }"
|
||||
+ echo "$code_snippet" >> "${main_test_file}"
|
||||
+}
|
||||
+
|
||||
+function recover_main_test() {
|
||||
+ mv "${main_file}".bk "${main_file}"
|
||||
+ mv "${main_test_file}".bk "${main_test_file}"
|
||||
+}
|
||||
+
|
||||
+function build_main_test_binary() {
|
||||
+ pkgs=$(go list ${go_test_mod_method} "${project_root}"/... | grep -Ev ${exclude_pattern} | tr "\r\n" ",")
|
||||
+ go test -coverpkg="${pkgs}" ${main_pkg} ${go_test_mod_method} ${go_test_cover_method} ${go_test_count_method} -c -o="${main_test_binary_file}"
|
||||
+}
|
||||
+
|
||||
+function run_main_test_binary() {
|
||||
+ ${main_test_binary_file} -test.coverprofile="${coverage_file}" > "${coverage_log}" 2>&1 &
|
||||
+ main_test_pid=$!
|
||||
+ for _ in $(seq 1 10); do
|
||||
+ if isula-build info > /dev/null 2>&1; then
|
||||
+ break
|
||||
+ else
|
||||
+ sleep 1
|
||||
+ fi
|
||||
+ done
|
||||
+}
|
||||
+
|
||||
+function run_coverage_test() {
|
||||
+ # do cover tests
|
||||
+ echo "sdv coverage test"
|
||||
+ # cover_test_xxx
|
||||
+ # cover_test_xxx
|
||||
+ # cover_test_xxx
|
||||
+ # cover_test_xxx
|
||||
+}
|
||||
+
|
||||
+function finish_coverage_test() {
|
||||
+ kill -15 $main_test_pid
|
||||
+}
|
||||
+
|
||||
+function generate_coverage() {
|
||||
+ go tool cover -html="${coverage_file}" -o="${coverage_html}"
|
||||
+}
|
||||
+
|
||||
+function cleanup() {
|
||||
+ rm "$main_test_binary_file"
|
||||
+}
|
||||
+
|
||||
+precheck
|
||||
+modify_main_test
|
||||
+build_main_test_binary
|
||||
+recover_main_test
|
||||
+run_main_test_binary
|
||||
+run_coverage_test
|
||||
+finish_coverage_test
|
||||
+generate_coverage
|
||||
+cleanup
|
||||
diff --git a/hack/unit_test.sh b/hack/unit_test.sh
|
||||
index a94a2d38..94a44a95 100755
|
||||
--- a/hack/unit_test.sh
|
||||
+++ b/hack/unit_test.sh
|
||||
@@ -12,32 +12,72 @@
|
||||
# Author: iSula Team
|
||||
# Create: 2020-07-11
|
||||
# Description: go test script
|
||||
+# Note: use this file by typing make test-unit or make test-unit-cover
|
||||
+# Do not run this script directly
|
||||
+
|
||||
+SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" || exit; pwd)
|
||||
+# shellcheck disable=SC1091
|
||||
+source "${SCRIPT_DIR}"/merge_coverage.sh
|
||||
|
||||
export GO111MODULE=on
|
||||
+run_coverage=$1
|
||||
+covers_folder=${PWD}/covers
|
||||
+testlog=${PWD}"/unit_test_log"
|
||||
+exclude_pattern="gopkgs|api/services"
|
||||
+go_test_mod_method="-mod=vendor"
|
||||
+go_test_count_method="-count=1"
|
||||
+go_test_timeout_flag="-timeout=300s"
|
||||
+go_test_race_flag="-race"
|
||||
|
||||
-TEST_ARGS=""
|
||||
-if [ ! -z "${TEST_REG}" ]; then
|
||||
- TEST_ARGS+=" -args TEST_REG=${TEST_REG}"
|
||||
-fi
|
||||
-if [ ! -z "${SKIP_REG}" ]; then
|
||||
- TEST_ARGS+=" -args SKIP_REG=${SKIP_REG}"
|
||||
-fi
|
||||
-echo "Testing with args ${TEST_ARGS}"
|
||||
+function precheck() {
|
||||
+ if pgrep isula-builder > /dev/null 2>&1; then
|
||||
+ echo "isula-builder is already running, please stop it first"
|
||||
+ exit 1
|
||||
+ fi
|
||||
+}
|
||||
|
||||
-testlog=${PWD}"/unit_test_log"
|
||||
-rm -f "${testlog}"
|
||||
-touch "${testlog}"
|
||||
-golist=$(go list ./... | grep -v gopkgs)
|
||||
-for path in ${golist}; do
|
||||
- echo "Start to test: ${path}"
|
||||
- # TEST_ARGS is " -args SKIP_REG=foo", so no double quote for it
|
||||
- go test -race -mod=vendor -cover -count=1 -timeout 300s -v "${path}" ${TEST_ARGS} >> "${testlog}"
|
||||
- cat "${testlog}" | grep -E -- "--- FAIL:|^FAIL"
|
||||
- if [ $? -eq 0 ]; then
|
||||
+function run_unit_test() {
|
||||
+ TEST_ARGS=""
|
||||
+ if [ -n "${TEST_REG}" ]; then
|
||||
+ TEST_ARGS+=" -args TEST_REG=${TEST_REG}"
|
||||
+ fi
|
||||
+ if [ -n "${SKIP_REG}" ]; then
|
||||
+ TEST_ARGS+=" -args SKIP_REG=${SKIP_REG}"
|
||||
+ fi
|
||||
+ echo "Testing with args ${TEST_ARGS}"
|
||||
+
|
||||
+ rm -f "${testlog}"
|
||||
+ if [[ -n $run_coverage ]]; then
|
||||
+ mkdir -p "${covers_folder}"
|
||||
+ fi
|
||||
+ for package in $(go list ${go_test_mod_method} ./... | grep -Ev ${exclude_pattern}); do
|
||||
+ echo "Start to test: ${package}"
|
||||
+ if [[ -n $run_coverage ]]; then
|
||||
+ coverprofile_file="${covers_folder}/$(echo "$package" | tr / -).cover"
|
||||
+ coverprofile_flag="-coverprofile=${coverprofile_file}"
|
||||
+ go_test_covermode_flag="-covermode=set"
|
||||
+ go_test_race_flag=""
|
||||
+ fi
|
||||
+ # TEST_ARGS is " -args SKIP_REG=foo", so no double quote for it
|
||||
+ # shellcheck disable=SC2086
|
||||
+ go test -v ${go_test_race_flag} ${go_test_mod_method} ${coverprofile_flag} ${go_test_covermode_flag} -coverpkg=${package} ${go_test_count_method} ${go_test_timeout_flag} "${package}" ${TEST_ARGS} >> "${testlog}"
|
||||
+ done
|
||||
+
|
||||
+ if grep -E -- "--- FAIL:|^FAIL" "${testlog}"; then
|
||||
echo "Testing failed... Please check ${testlog}"
|
||||
- exit 1
|
||||
fi
|
||||
tail -n 1 "${testlog}"
|
||||
-done
|
||||
|
||||
-rm -f "${testlog}"
|
||||
+ rm -f "${testlog}"
|
||||
+}
|
||||
+
|
||||
+function generate_unit_test_coverage() {
|
||||
+ if [[ -n ${run_coverage} ]]; then
|
||||
+ merge_cover "cover_unit_test_all" "${covers_folder}"
|
||||
+ rm -rf "${covers_folder}"
|
||||
+ fi
|
||||
+}
|
||||
+
|
||||
+precheck
|
||||
+run_unit_test
|
||||
+generate_unit_test_coverage
|
||||
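For context, the reworked script is still meant to be driven through make; the registry address below is a placeholder, and TEST_REG/SKIP_REG are simply forwarded to the test binaries through -args as shown above:

    make test-unit-cover                                # per-package profiles under ./covers, merged into cover_unit_test_all.out
    TEST_REG=registry.example.com make test-unit-cover  # hypothetical registry forwarded to the tests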
diff --git a/tests/src/integration_test_set_new_root.sh b/tests/src/integration_test_set_new_root.sh
|
||||
index bb11a080..ae8d436b 100644
|
||||
--- a/tests/src/integration_test_set_new_root.sh
|
||||
+++ b/tests/src/integration_test_set_new_root.sh
|
||||
@@ -12,6 +12,7 @@
|
||||
# Author: Weizheng Xing
|
||||
# Create: 2021-05-29
|
||||
# Description: test set new run and data root in configuration.toml
|
||||
+# History: 2021-8-18 Xiang Li <lixiang172@huawei.com> use busybox instead of openeuler base image to speed up test
|
||||
|
||||
top_dir=$(git rev-parse --show-toplevel)
|
||||
# shellcheck disable=SC1091
|
||||
@@ -20,7 +21,7 @@ source "$top_dir"/tests/lib/common.sh
|
||||
run_root="/var/run/new-isula-build"
|
||||
data_root="/var/lib/new-isula-build"
|
||||
config_file="/etc/isula-build/configuration.toml"
|
||||
-base_image="hub.oepkgs.net/openeuler/openeuler:21.03"
|
||||
+image="hub.oepkgs.net/openeuler/busybox:latest"
|
||||
|
||||
function clean()
|
||||
{
|
||||
@@ -47,10 +48,10 @@ function pre_test()
|
||||
function do_test()
|
||||
{
|
||||
tree_node_befor=$(tree -L 3 $data_root | wc -l)
|
||||
- run_with_debug "isula-build ctr-img pull $base_image"
|
||||
+ run_with_debug "isula-build ctr-img pull $image"
|
||||
tree_node_after=$(tree -L 3 $data_root | wc -l)
|
||||
|
||||
- if [ $((tree_node_after - tree_node_befor)) -eq 8 ] && run_with_debug "isula-build ctr-img rm $base_image"; then
|
||||
+ if [ $((tree_node_after - tree_node_befor)) -eq 8 ] && run_with_debug "isula-build ctr-img rm $image"; then
|
||||
echo "PASS"
|
||||
else
|
||||
echo "Sets of run and data root are not effective"
|
||||
--
|
||||
2.27.0
|
||||
|
||||
253
patch/0082-test-optimize-scripts-in-hack.patch
Normal file
@@ -0,0 +1,253 @@
|
||||
From 5d3a9a0f2e5510e68040d252190070925ee89fd0 Mon Sep 17 00:00:00 2001
|
||||
From: DCCooper <1866858@gmail.com>
|
||||
Date: Mon, 1 Nov 2021 23:37:44 +0800
|
||||
Subject: [PATCH 14/16] test: optimize scripts in hack
|
||||
|
||||
reason:
|
||||
1. add framework for integration tests
|
||||
2. shellcheck fixes for scripts
|
||||
|
||||
Signed-off-by: DCCooper <1866858@gmail.com>
|
||||
---
|
||||
Makefile | 10 ++---
|
||||
hack/all_coverage.sh | 4 +-
|
||||
...dv_coverage.sh => integration_coverage.sh} | 42 ++++++++++++-------
|
||||
hack/merge_coverage.sh | 30 ++++++-------
|
||||
hack/unit_test.sh | 10 ++---
|
||||
5 files changed, 53 insertions(+), 43 deletions(-)
|
||||
rename hack/{sdv_coverage.sh => integration_coverage.sh} (63%)
|
||||
|
||||
diff --git a/Makefile b/Makefile
|
||||
index 73482a41..c5384e07 100644
|
||||
--- a/Makefile
|
||||
+++ b/Makefile
|
||||
@@ -42,7 +42,7 @@ endif
|
||||
##@ Help
|
||||
.PHONY: help
|
||||
help: ## Display the help info
|
||||
- @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-20s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
|
||||
+ @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-25s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
|
||||
|
||||
##@ Build
|
||||
|
||||
@@ -118,14 +118,14 @@ test-unit-cover: ## Test unit case and generate coverage
|
||||
@./hack/unit_test.sh cover
|
||||
@echo "Unit test cover done!"
|
||||
|
||||
-.PHONY: test-sdv-cover
|
||||
-test-sdv-cover: ## Test integration case and generate coverage
|
||||
+.PHONY: test-integration-cover
|
||||
+test-integration-cover: ## Test integration case and generate coverage
|
||||
@echo "Integration test cover starting..."
|
||||
- @./hack/sdv_coverage.sh
|
||||
+ @./hack/integration_coverage.sh
|
||||
@echo "Integration test cover done!"
|
||||
|
||||
.PHONY: test-cover
|
||||
-test-cover: test-sdv-cover test-unit-cover ## Test both unit and sdv case and generate unity coverage
|
||||
+test-cover: test-integration-cover test-unit-cover ## Test both unit and integration case and generate unity coverage
|
||||
@echo "Test cover starting..."
|
||||
@./hack/all_coverage.sh
|
||||
@echo "Test cover done!"
|
||||
diff --git a/hack/all_coverage.sh b/hack/all_coverage.sh
|
||||
index 9f9eb5ff..0f23e9d4 100755
|
||||
--- a/hack/all_coverage.sh
|
||||
+++ b/hack/all_coverage.sh
|
||||
@@ -20,7 +20,7 @@ SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" || exit; pwd)
|
||||
source "${SCRIPT_DIR}"/merge_coverage.sh
|
||||
|
||||
unit_coverage=${PWD}/cover_unit_test_all.out
|
||||
-sdv_coverage=${PWD}/cover_sdv_test_all.out
|
||||
+integration_coverage=${PWD}/cover_integration_test_all.out
|
||||
output_file=${PWD}/cover_test_all
|
||||
|
||||
-merge_cover "${output_file}" "${sdv_coverage}" "${unit_coverage}"
|
||||
+merge_cover "${output_file}" "${integration_coverage}" "${unit_coverage}"
|
||||
diff --git a/hack/sdv_coverage.sh b/hack/integration_coverage.sh
|
||||
similarity index 63%
|
||||
rename from hack/sdv_coverage.sh
|
||||
rename to hack/integration_coverage.sh
|
||||
index 874d9373..7462c545 100755
|
||||
--- a/hack/sdv_coverage.sh
|
||||
+++ b/hack/integration_coverage.sh
|
||||
@@ -12,7 +12,7 @@
|
||||
# Author: Xiang Li
|
||||
# Create: 2020-03-01
|
||||
# Description: shell script for coverage
|
||||
-# Note: use this file by typing make test-sdv-cover or make test-cover
|
||||
+# Note: use this file by typing make test-integration-cover or make test-cover
|
||||
# Do not run this script directly
|
||||
|
||||
project_root=${PWD}
|
||||
@@ -26,9 +26,10 @@ go_test_cover_method="-covermode=set"
|
||||
main_pkg="${vendor_name}/${project_name}/${main_relative_path}"
|
||||
main_test_file=${project_root}/${main_relative_path}/main_test.go
|
||||
main_file=${project_root}/${main_relative_path}/main.go
|
||||
-coverage_file=${project_root}/cover_sdv_test_all.out
|
||||
-coverage_html=${project_root}/cover_sdv_test_all.html
|
||||
-coverage_log=${project_root}/cover_sdv_test_all.log
|
||||
+coverage_file=${project_root}/cover_integration_test_all.out
|
||||
+coverage_html=${project_root}/cover_integration_test_all.html
|
||||
+coverage_daemon_log=${project_root}/cover_integration_test_all_daemon.log
|
||||
+coverage_client_log=${project_root}/cover_integration_test_all_client.log
|
||||
main_test_binary_file=${project_root}/main.test
|
||||
|
||||
function precheck() {
|
||||
@@ -44,10 +45,10 @@ function modify_main_test() {
|
||||
cp "${main_test_file}" "${main_test_file}".bk
|
||||
# delete Args field for main.go
|
||||
local comment_pattern="Args: util.NoArgs"
|
||||
- sed -i "/$comment_pattern/s/^#*/\/\/ /" "${main_file}"
|
||||
+ sed -i "/${comment_pattern}/s/^#*/\/\/ /" "${main_file}"
|
||||
# add new line for main_test.go
|
||||
code_snippet="func TestMain(t *testing.T) { main() }"
|
||||
- echo "$code_snippet" >> "${main_test_file}"
|
||||
+ echo "${code_snippet}" >> "${main_test_file}"
|
||||
}
|
||||
|
||||
function recover_main_test() {
|
||||
@@ -56,12 +57,12 @@ function recover_main_test() {
|
||||
}
|
||||
|
||||
function build_main_test_binary() {
|
||||
- pkgs=$(go list ${go_test_mod_method} "${project_root}"/... | grep -Ev ${exclude_pattern} | tr "\r\n" ",")
|
||||
- go test -coverpkg="${pkgs}" ${main_pkg} ${go_test_mod_method} ${go_test_cover_method} ${go_test_count_method} -c -o="${main_test_binary_file}"
|
||||
+ pkgs=$(go list "${go_test_mod_method}" "${project_root}"/... | grep -Ev "${exclude_pattern}" | tr "\r\n" ",")
|
||||
+ go test -coverpkg="${pkgs}" "${main_pkg}" "${go_test_mod_method}" "${go_test_cover_method}" "${go_test_count_method}" -c -o="${main_test_binary_file}" > /dev/null 2>&1
|
||||
}
|
||||
|
||||
function run_main_test_binary() {
|
||||
- ${main_test_binary_file} -test.coverprofile="${coverage_file}" > "${coverage_log}" 2>&1 &
|
||||
+ ${main_test_binary_file} -test.coverprofile="${coverage_file}" > "${coverage_daemon_log}" 2>&1 &
|
||||
main_test_pid=$!
|
||||
for _ in $(seq 1 10); do
|
||||
if isula-build info > /dev/null 2>&1; then
|
||||
@@ -74,15 +75,22 @@ function run_main_test_binary() {
|
||||
|
||||
function run_coverage_test() {
|
||||
# do cover tests
|
||||
- echo "sdv coverage test"
|
||||
- # cover_test_xxx
|
||||
- # cover_test_xxx
|
||||
- # cover_test_xxx
|
||||
- # cover_test_xxx
|
||||
+ while IFS= read -r testfile; do
|
||||
+ printf "%-60s" "test $(basename "${testfile}"): "
|
||||
+ echo -e "\n$(basename "${testfile}"):" >> "${coverage_client_log}"
|
||||
+ if ! bash "${testfile}" >> "${coverage_client_log}" 2>&1; then
|
||||
+ echo "FAIL"
|
||||
+ return_code=1
|
||||
+ else
|
||||
+ echo "PASS"
|
||||
+ fi
|
||||
+ done < <(find "${project_root}"/tests/src -maxdepth 1 -name "cover_test_*" -type f -print)
|
||||
+ # shellcheck disable=SC2248
|
||||
+ return ${return_code}
|
||||
}
|
||||
|
||||
function finish_coverage_test() {
|
||||
- kill -15 $main_test_pid
|
||||
+ kill -15 "${main_test_pid}"
|
||||
}
|
||||
|
||||
function generate_coverage() {
|
||||
@@ -90,7 +98,7 @@ function generate_coverage() {
|
||||
}
|
||||
|
||||
function cleanup() {
|
||||
- rm "$main_test_binary_file"
|
||||
+ rm "${main_test_binary_file}"
|
||||
}
|
||||
|
||||
precheck
|
||||
@@ -102,3 +110,5 @@ run_coverage_test
|
||||
finish_coverage_test
|
||||
generate_coverage
|
||||
cleanup
|
||||
+# shellcheck disable=SC2248
|
||||
+exit ${return_code}
|
||||
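The find loop above is what turns this into a small framework: every tests/src/cover_test_*.sh script is picked up automatically and its exit code decides PASS or FAIL. A minimal new case could look like this sketch (the file name and the check are illustrative only):

    # tests/src/cover_test_example.sh (hypothetical)
    #!/bin/bash
    top_dir=$(git rev-parse --show-toplevel)
    # shellcheck disable=SC1091
    source "${top_dir}"/tests/lib/common.sh
    # fail the case if the daemon started by integration_coverage.sh is not reachable
    isula-build info > /dev/null 2>&1 || exit 1
    exit 0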
diff --git a/hack/merge_coverage.sh b/hack/merge_coverage.sh
|
||||
index 6e529a34..f043dfaf 100644
|
||||
--- a/hack/merge_coverage.sh
|
||||
+++ b/hack/merge_coverage.sh
|
||||
@@ -24,32 +24,32 @@ function merge_cover() {
|
||||
output_coverage_file=${output_file_name}.out
|
||||
output_html_file=${output_file_name}.html
|
||||
output_merge_cover=${output_file_name}.merge
|
||||
- grep -r -h -v "^mode:" "${input_coverages[@]}" | sort > "$output_merge_cover"
|
||||
+ grep -r -h -v "^mode:" "${input_coverages[@]}" | sort > "${output_merge_cover}"
|
||||
current=""
|
||||
count=0
|
||||
- echo "mode: set" > "$output_coverage_file"
|
||||
+ echo "mode: set" > "${output_coverage_file}"
|
||||
# read the cover report from merge_cover, convert it, write to final coverage
|
||||
while read -r line; do
|
||||
- block=$(echo "$line" | cut -d ' ' -f1-2)
|
||||
- num=$(echo "$line" | cut -d ' ' -f3)
|
||||
- if [ "$current" == "" ]; then
|
||||
- current=$block
|
||||
- count=$num
|
||||
- elif [ "$block" == "$current" ]; then
|
||||
+ block=$(echo "${line}" | cut -d ' ' -f1-2)
|
||||
+ num=$(echo "${line}" | cut -d ' ' -f3)
|
||||
+ if [ "${current}" == "" ]; then
|
||||
+ current=${block}
|
||||
+ count=${num}
|
||||
+ elif [ "${block}" == "${current}" ]; then
|
||||
count=$((count + num))
|
||||
else
|
||||
# if the two sorted lines are not in the same code block, write the accumulated count of the last code block to the final coverage
|
||||
- echo "$current" $count >> "${output_coverage_file}"
|
||||
- current=$block
|
||||
- count=$num
|
||||
+ echo "${current} ${count}" >> "${output_coverage_file}"
|
||||
+ current=${block}
|
||||
+ count=${num}
|
||||
fi
|
||||
- done < "$output_merge_cover"
|
||||
+ done < "${output_merge_cover}"
|
||||
rm -rf "${output_merge_cover}"
|
||||
|
||||
# merge the results of last line to the final coverage
|
||||
- if [ "$current" != "" ]; then
|
||||
- echo "$current" "$count" >> "${output_coverage_file}"
|
||||
+ if [ "${current}" != "" ]; then
|
||||
+ echo "${current} ${count}" >> "${output_coverage_file}"
|
||||
fi
|
||||
|
||||
- go tool cover -html="${output_coverage_file}" -o "$output_html_file"
|
||||
+ go tool cover -html="${output_coverage_file}" -o "${output_html_file}"
|
||||
}
|
||||
diff --git a/hack/unit_test.sh b/hack/unit_test.sh
|
||||
index 94a44a95..161feb6b 100755
|
||||
--- a/hack/unit_test.sh
|
||||
+++ b/hack/unit_test.sh
|
||||
@@ -47,20 +47,20 @@ function run_unit_test() {
|
||||
echo "Testing with args ${TEST_ARGS}"
|
||||
|
||||
rm -f "${testlog}"
|
||||
- if [[ -n $run_coverage ]]; then
|
||||
+ if [[ -n ${run_coverage} ]]; then
|
||||
mkdir -p "${covers_folder}"
|
||||
fi
|
||||
- for package in $(go list ${go_test_mod_method} ./... | grep -Ev ${exclude_pattern}); do
|
||||
+ for package in $(go list "${go_test_mod_method}" ./... | grep -Ev "${exclude_pattern}"); do
|
||||
echo "Start to test: ${package}"
|
||||
- if [[ -n $run_coverage ]]; then
|
||||
- coverprofile_file="${covers_folder}/$(echo "$package" | tr / -).cover"
|
||||
+ if [[ -n ${run_coverage} ]]; then
|
||||
+ coverprofile_file="${covers_folder}/$(echo "${package}" | tr / -).cover"
|
||||
coverprofile_flag="-coverprofile=${coverprofile_file}"
|
||||
go_test_covermode_flag="-covermode=set"
|
||||
go_test_race_flag=""
|
||||
fi
|
||||
# TEST_ARGS is " -args SKIP_REG=foo", so no double quote for it
|
||||
# shellcheck disable=SC2086
|
||||
- go test -v ${go_test_race_flag} ${go_test_mod_method} ${coverprofile_flag} ${go_test_covermode_flag} -coverpkg=${package} ${go_test_count_method} ${go_test_timeout_flag} "${package}" ${TEST_ARGS} >> "${testlog}"
|
||||
+ go test -v ${go_test_race_flag} "${go_test_mod_method}" ${coverprofile_flag} "${go_test_covermode_flag}" -coverpkg=${package} "${go_test_count_method}" "${go_test_timeout_flag}" "${package}" ${TEST_ARGS} >> "${testlog}"
|
||||
done
|
||||
|
||||
if grep -E -- "--- FAIL:|^FAIL" "${testlog}"; then
|
||||
--
|
||||
2.27.0
|
||||
|
||||
@@ -0,0 +1,124 @@
|
||||
From 08ebd389b5e3bb5104035c36891f8add75e18f57 Mon Sep 17 00:00:00 2001
|
||||
From: DCCooper <1866858@gmail.com>
|
||||
Date: Mon, 1 Nov 2021 23:37:56 +0800
|
||||
Subject: [PATCH 15/16] test: add common function for testing separated image
|
||||
|
||||
Signed-off-by: DCCooper <1866858@gmail.com>
|
||||
---
|
||||
tests/lib/separator.sh | 104 +++++++++++++++++++++++++++++++++++++++++
|
||||
1 file changed, 104 insertions(+)
|
||||
create mode 100644 tests/lib/separator.sh
|
||||
|
||||
diff --git a/tests/lib/separator.sh b/tests/lib/separator.sh
|
||||
new file mode 100644
|
||||
index 00000000..ad05eb55
|
||||
--- /dev/null
|
||||
+++ b/tests/lib/separator.sh
|
||||
@@ -0,0 +1,104 @@
|
||||
+#!/bin/bash
|
||||
+
|
||||
+# Copyright (c) Huawei Technologies Co., Ltd. 2020. All rights reserved.
|
||||
+# isula-build licensed under the Mulan PSL v2.
|
||||
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
|
||||
+# You may obtain a copy of Mulan PSL v2 at:
|
||||
+# http://license.coscl.org.cn/MulanPSL2
|
||||
+# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
|
||||
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
|
||||
+# PURPOSE.
|
||||
+# See the Mulan PSL v2 for more details.
|
||||
+# Author: Xiang Li
|
||||
+# Create: 2021-11-01
|
||||
+# Description: common function for save/load separated image
|
||||
+
|
||||
+exit_flag=0
|
||||
+
|
||||
+# $1: from image name
|
||||
+# $2: build image name
|
||||
+# $3: layers number
|
||||
+# $4: Dockerfile path
|
||||
+function touch_dockerfile() {
|
||||
+ cat > "$4" << EOF
|
||||
+FROM $1
|
||||
+MAINTAINER DCCooper
|
||||
+EOF
|
||||
+ for i in $(seq "$3"); do
|
||||
+ echo "RUN echo \"This is $2 layer ${i}: ${RANDOM}\" > line.${i}" >> "$4"
|
||||
+ done
|
||||
+}
|
||||
+
|
||||
+# $1: from image name
|
||||
+# $2: build image name
|
||||
+# $3: layers number
|
||||
+# $4: Dockerfile path
|
||||
+function touch_bad_dockerfile() {
|
||||
+ cat > "$4" << EOF
|
||||
+FROM $1
|
||||
+MAINTAINER DCCooper
|
||||
+EOF
|
||||
+ for i in $(seq "$3"); do
|
||||
+ echo "RUN echo \"This is $2 layer ${i}: ${RANDOM}\"" >> "$4"
|
||||
+ done
|
||||
+}
|
||||
+
|
||||
+# $1: image name
|
||||
+# $2: context dir
|
||||
+function build_image() {
|
||||
+ isula-build ctr-img build -t "$1" "$2"
|
||||
+}
|
||||
+
|
||||
+function touch_rename_json() {
|
||||
+ cat > "$1" << EOF
|
||||
+[
|
||||
+ {
|
||||
+ "name": "app1_latest_app_image.tar.gz",
|
||||
+ "rename": "app1.tar.gz"
|
||||
+ },
|
||||
+ {
|
||||
+ "name": "app2_latest_app_image.tar.gz",
|
||||
+ "rename": "app2.tar.gz"
|
||||
+ },
|
||||
+ {
|
||||
+ "name": "app1_latest_base_image.tar.gz",
|
||||
+ "rename": "base1.tar.gz"
|
||||
+ },
|
||||
+ {
|
||||
+ "name": "app2_latest_base_image.tar.gz",
|
||||
+ "rename": "base2.tar.gz"
|
||||
+ },
|
||||
+ {
|
||||
+ "name": "app1_latest_lib_image.tar.gz",
|
||||
+ "rename": "lib1.tar.gz"
|
||||
+ },
|
||||
+ {
|
||||
+ "name": "app2_latest_lib_image.tar.gz",
|
||||
+ "rename": "lib2.tar.gz"
|
||||
+ }
|
||||
+]
|
||||
+EOF
|
||||
+}
|
||||
+
|
||||
+function touch_bad_rename_json() {
|
||||
+ touch_rename_json "$1"
|
||||
+ sed -i '2d' "$1"
|
||||
+}
|
||||
+
|
||||
+function check_result_equal() {
|
||||
+ if [[ $1 -eq $2 ]]; then
|
||||
+ return 0
|
||||
+ else
|
||||
+ ((exit_flag++))
|
||||
+ return 1
|
||||
+ fi
|
||||
+}
|
||||
+
|
||||
+function check_result_not_equal() {
|
||||
+ if [[ $1 -ne $2 ]]; then
|
||||
+ return 0
|
||||
+ else
|
||||
+ ((exit_flag++))
|
||||
+ return 1
|
||||
+ fi
|
||||
+}
|
||||
--
|
||||
2.27.0
|
||||
|
||||
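As a usage note for the helpers defined above (image names, layer counts and the context directory are illustrative; the tests added in the next patch follow the same pattern):

    touch_dockerfile "hub.oepkgs.net/library/busybox:latest" "lib:latest" 2 ./Dockerfile
    build_image "lib:latest" .        # FROM busybox, MAINTAINER, then 2 random RUN layers
    touch_dockerfile "lib:latest" "app1:latest" 1 ./Dockerfile
    build_image "app1:latest" .
    touch_rename_json ./rename.json   # maps the default *_image.tar.gz names to short ones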
@@ -0,0 +1,523 @@
|
||||
From b3e96588a3e236cec8ec5e62a1fb884cf2eabc80 Mon Sep 17 00:00:00 2001
|
||||
From: DCCooper <1866858@gmail.com>
|
||||
Date: Mon, 1 Nov 2021 23:38:06 +0800
|
||||
Subject: [PATCH 16/16] test: add integration tests for saving and loading
|
||||
separated image
|
||||
|
||||
Signed-off-by: DCCooper <1866858@gmail.com>
|
||||
---
|
||||
.../cover_test_load_separated_image_failed.sh | 145 ++++++++++++++++++
|
||||
...cover_test_load_separated_image_success.sh | 89 +++++++++++
|
||||
.../cover_test_save_separated_image_failed.sh | 107 +++++++++++++
|
||||
...cover_test_save_separated_image_success.sh | 54 +++++++
|
||||
...r_test_save_separated_image_with_rename.sh | 75 +++++++++
|
||||
5 files changed, 470 insertions(+)
|
||||
create mode 100644 tests/src/cover_test_load_separated_image_failed.sh
|
||||
create mode 100644 tests/src/cover_test_load_separated_image_success.sh
|
||||
create mode 100644 tests/src/cover_test_save_separated_image_failed.sh
|
||||
create mode 100644 tests/src/cover_test_save_separated_image_success.sh
|
||||
create mode 100644 tests/src/cover_test_save_separated_image_with_rename.sh
|
||||
|
||||
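The five scripts below exercise the new save/load CLI end to end. In outline, a round trip looks like this (directory and image names are placeholders taken from the tests; base1.tar.gz and lib1.tar.gz are the names produced by the rename.json from tests/lib/separator.sh):

    # save two app images plus their shared base and lib layers into ./Images
    isula-build ctr-img save -b hub.oepkgs.net/library/busybox:latest -l lib:latest -d ./Images -r rename.json app1:latest app2:latest
    # load one app back, verifying it against the saved base and lib tarballs
    isula-build ctr-img load -d ./Images -b ./Images/base1.tar.gz -l ./Images/lib1.tar.gz -i app1:latest
    # or load from the directory alone; --no-check skips the extra verification (see test_run5)
    isula-build ctr-img load -d ./Images -i app1:latest --no-check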
diff --git a/tests/src/cover_test_load_separated_image_failed.sh b/tests/src/cover_test_load_separated_image_failed.sh
|
||||
new file mode 100644
|
||||
index 00000000..26590d0c
|
||||
--- /dev/null
|
||||
+++ b/tests/src/cover_test_load_separated_image_failed.sh
|
||||
@@ -0,0 +1,145 @@
|
||||
+#!/bin/bash
|
||||
+
|
||||
+# Copyright (c) Huawei Technologies Co., Ltd. 2020. All rights reserved.
|
||||
+# isula-build licensed under the Mulan PSL v2.
|
||||
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
|
||||
+# You may obtain a copy of Mulan PSL v2 at:
|
||||
+# http://license.coscl.org.cn/MulanPSL2
|
||||
+# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
|
||||
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
|
||||
+# PURPOSE.
|
||||
+# See the Mulan PSL v2 for more details.
|
||||
+# Author: Xiang Li
|
||||
+# Create: 2021-11-01
|
||||
+# Description: cover test for load separated image
|
||||
+
|
||||
+test_name=${BASH_SOURCE[0]}
|
||||
+workspace=/tmp/${test_name}.$(date +%s)
|
||||
+mkdir -p "${workspace}"
|
||||
+dockerfile=${workspace}/Dockerfile
|
||||
+tarball_dir=${workspace}/Images
|
||||
+rename_json=${workspace}/rename.json
|
||||
+top_dir=$(git rev-parse --show-toplevel)
|
||||
+# shellcheck disable=SC1091
|
||||
+source "${top_dir}"/tests/lib/separator.sh
|
||||
+
|
||||
+function pre_run() {
|
||||
+ base_image_name="hub.oepkgs.net/library/busybox:latest"
|
||||
+ lib_image_name="lib:latest"
|
||||
+ app1_image_name="app1:latest"
|
||||
+ app2_image_name="app2:latest"
|
||||
+ lib_layer_number=5
|
||||
+ app1_layer_number=4
|
||||
+ app2_layer_number=3
|
||||
+ touch_dockerfile "${base_image_name}" "${lib_image_name}" "${lib_layer_number}" "${dockerfile}"
|
||||
+ build_image "${lib_image_name}" "${workspace}"
|
||||
+ touch_dockerfile "${lib_image_name}" "${app1_image_name}" "${app1_layer_number}" "${dockerfile}"
|
||||
+ build_image "${app1_image_name}" "${workspace}"
|
||||
+ touch_dockerfile "${lib_image_name}" "${app2_image_name}" "${app2_layer_number}" "${dockerfile}"
|
||||
+ build_image "${app2_image_name}" "${workspace}"
|
||||
+ touch_rename_json "${rename_json}"
|
||||
+ isula-build ctr-img save -b "${base_image_name}" -l "${lib_image_name}" -d "${tarball_dir}" "${app1_image_name}" "${app2_image_name}" -r "${rename_json}"
|
||||
+ check_result_equal $? 0
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+# empty -d flag and missing -b
|
||||
+function test_run1() {
|
||||
+ isula-build ctr-img load -l "${tarball_dir}"/base1.tar.gz -i "${app1_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+# empty -d flag and missing -l
|
||||
+function test_run2() {
|
||||
+ isula-build ctr-img load -b "${tarball_dir}"/base1.tar.gz -i "${app1_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+# empty -d, -b, -l flag
|
||||
+function test_run3() {
|
||||
+ isula-build ctr-img load -i "${app1_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+# use lib as base image tarball
|
||||
+function test_run4() {
|
||||
+ isula-build ctr-img load -d "${tarball_dir}" -b "${tarball_dir}"/lib1.tar.gz -i "${app1_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+# missing app tarball
|
||||
+function test_run5() {
|
||||
+ mv "${tarball_dir}"/app1.tar.gz "${workspace}"
|
||||
+ isula-build ctr-img load -d "${tarball_dir}" -l "${tarball_dir}"/lib1.tar.gz -i "${app1_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ mv "${workspace}"/app1.tar.gz "${tarball_dir}"
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+# lib tarball not exist
|
||||
+function test_run6() {
|
||||
+ isula-build ctr-img load -d "${tarball_dir}" -l not_exist_lib.tar -i "${app1_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+# base tarball not exist
|
||||
+function test_run7() {
|
||||
+ isula-build ctr-img load -d "${tarball_dir}" -b not_exist_base.tar -i "${app1_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+# invalid base tarball
|
||||
+function test_run8() {
|
||||
+ invalid_tarball=${workspace}/base1.tar
|
||||
+ echo "invalid base tarball" >> "${invalid_tarball}"
|
||||
+ isula-build ctr-img load -d "${tarball_dir}" -b "${invalid_tarball}" -i "${app1_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ rm -rf "${invalid_tarball}"
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+# invalid lib tarball
|
||||
+function test_run9() {
|
||||
+ invalid_tarball=${workspace}/lib1.tar
|
||||
+ echo "invalid lib tarball" >> "${invalid_tarball}"
|
||||
+ isula-build ctr-img load -d "${tarball_dir}" -l "${invalid_tarball}" -i "${app1_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ rm -rf "${invalid_tarball}"
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+# manifest file corruption
|
||||
+function test_run10() {
|
||||
+ cp "${tarball_dir}"/manifest "${tarball_dir}"/manifest.bk
|
||||
+ sed -i "1d" "${tarball_dir}"/manifest
|
||||
+ isula-build ctr-img load -d "${tarball_dir}" -i "${app1_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ mv "${tarball_dir}"/manifest.bk "${tarball_dir}"/manifest
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+function cleanup() {
|
||||
+ rm -rf "${workspace}"
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+pre_run
|
||||
+test_run1
|
||||
+test_run2
|
||||
+test_run3
|
||||
+test_run4
|
||||
+test_run5
|
||||
+test_run6
|
||||
+test_run7
|
||||
+test_run8
|
||||
+test_run9
|
||||
+test_run10
|
||||
+cleanup
|
||||
+# shellcheck disable=SC2154
|
||||
+exit "${exit_flag}"
|
||||
diff --git a/tests/src/cover_test_load_separated_image_success.sh b/tests/src/cover_test_load_separated_image_success.sh
|
||||
new file mode 100644
|
||||
index 00000000..266b3eba
|
||||
--- /dev/null
|
||||
+++ b/tests/src/cover_test_load_separated_image_success.sh
|
||||
@@ -0,0 +1,89 @@
|
||||
+#!/bin/bash
|
||||
+
|
||||
+# Copyright (c) Huawei Technologies Co., Ltd. 2020. All rights reserved.
|
||||
+# isula-build licensed under the Mulan PSL v2.
|
||||
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
|
||||
+# You may obtain a copy of Mulan PSL v2 at:
|
||||
+# http://license.coscl.org.cn/MulanPSL2
|
||||
+# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
|
||||
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
|
||||
+# PURPOSE.
|
||||
+# See the Mulan PSL v2 for more details.
|
||||
+# Author: Xiang Li
|
||||
+# Create: 2021-11-01
|
||||
+# Description: cover test for load separated image
|
||||
+
|
||||
+test_name=${BASH_SOURCE[0]}
|
||||
+workspace=/tmp/${test_name}.$(date +%s)
|
||||
+mkdir -p "${workspace}"
|
||||
+dockerfile=${workspace}/Dockerfile
|
||||
+tarball_dir=${workspace}/Images
|
||||
+rename_json=${workspace}/rename.json
|
||||
+top_dir=$(git rev-parse --show-toplevel)
|
||||
+# shellcheck disable=SC1091
|
||||
+source "${top_dir}"/tests/lib/separator.sh
|
||||
+
|
||||
+function pre_run() {
|
||||
+ base_image_name="hub.oepkgs.net/library/busybox:latest"
|
||||
+ lib_image_name="lib:latest"
|
||||
+ app1_image_name="app1:latest"
|
||||
+ app2_image_name="app2:latest"
|
||||
+ lib_layer_number=5
|
||||
+ app1_layer_number=4
|
||||
+ app2_layer_number=3
|
||||
+ touch_dockerfile "${base_image_name}" "${lib_image_name}" "${lib_layer_number}" "${dockerfile}"
|
||||
+ build_image "${lib_image_name}" "${workspace}"
|
||||
+ touch_dockerfile "${lib_image_name}" "${app1_image_name}" "${app1_layer_number}" "${dockerfile}"
|
||||
+ build_image "${app1_image_name}" "${workspace}"
|
||||
+ touch_dockerfile "${lib_image_name}" "${app2_image_name}" "${app2_layer_number}" "${dockerfile}"
|
||||
+ build_image "${app2_image_name}" "${workspace}"
|
||||
+ touch_rename_json "${rename_json}"
|
||||
+ isula-build ctr-img save -b "${base_image_name}" -l "${lib_image_name}" -d "${tarball_dir}" "${app1_image_name}" "${app2_image_name}" -r "${rename_json}"
|
||||
+ check_result_equal $? 0
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+function test_run1() {
|
||||
+ isula-build ctr-img load -d "${tarball_dir}" -b "${tarball_dir}"/base1.tar.gz -l "${tarball_dir}"/lib1.tar.gz -i "${app1_image_name}"
|
||||
+ check_result_equal $? 0
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+function test_run2() {
|
||||
+ isula-build ctr-img load -d "${tarball_dir}" -b "${tarball_dir}"/base1.tar.gz -i "${app1_image_name}"
|
||||
+ check_result_equal $? 0
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+function test_run3() {
|
||||
+ isula-build ctr-img load -d "${tarball_dir}" -l "${tarball_dir}"/lib1.tar.gz -i "${app1_image_name}"
|
||||
+ check_result_equal $? 0
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+function test_run4() {
|
||||
+ isula-build ctr-img load -d "${tarball_dir}" -i "${app1_image_name}"
|
||||
+ check_result_equal $? 0
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+function test_run5() {
|
||||
+ isula-build ctr-img load -d "${tarball_dir}" -i "${app1_image_name}" --no-check
|
||||
+ check_result_equal $? 0
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+function cleanup() {
|
||||
+ rm -rf "${workspace}"
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+pre_run
|
||||
+test_run1
|
||||
+test_run2
|
||||
+test_run3
|
||||
+test_run4
|
||||
+test_run5
|
||||
+cleanup
|
||||
+# shellcheck disable=SC2154
|
||||
+exit "${exit_flag}"
|
||||
diff --git a/tests/src/cover_test_save_separated_image_failed.sh b/tests/src/cover_test_save_separated_image_failed.sh
|
||||
new file mode 100644
|
||||
index 00000000..c64dcf5d
|
||||
--- /dev/null
|
||||
+++ b/tests/src/cover_test_save_separated_image_failed.sh
|
||||
@@ -0,0 +1,107 @@
|
||||
+#!/bin/bash
|
||||
+
|
||||
+# Copyright (c) Huawei Technologies Co., Ltd. 2020. All rights reserved.
|
||||
+# isula-build licensed under the Mulan PSL v2.
|
||||
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
|
||||
+# You may obtain a copy of Mulan PSL v2 at:
|
||||
+# http://license.coscl.org.cn/MulanPSL2
|
||||
+# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
|
||||
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
|
||||
+# PURPOSE.
|
||||
+# See the Mulan PSL v2 for more details.
|
||||
+# Author: Xiang Li
|
||||
+# Create: 2021-11-01
|
||||
+# Description: cover test for save separated image
|
||||
+
|
||||
+test_name=${BASH_SOURCE[0]}
|
||||
+workspace=/tmp/${test_name}.$(date +%s)
|
||||
+mkdir -p "${workspace}"
|
||||
+dockerfile=${workspace}/Dockerfile
|
||||
+top_dir=$(git rev-parse --show-toplevel)
|
||||
+# shellcheck disable=SC1091
|
||||
+source "${top_dir}"/tests/lib/separator.sh
|
||||
+
|
||||
+function pre_run() {
|
||||
+ base_image_name="hub.oepkgs.net/library/busybox:latest"
|
||||
+ bad_lib_image_name="lib:bad"
|
||||
+ bad_app1_image_name="app1:bad"
|
||||
+ bad_app2_image_name="app2:bad"
|
||||
+ lib_image_name="lib:latest"
|
||||
+ app1_image_name="app1:latest"
|
||||
+ app2_image_name="app2:latest"
|
||||
+ bad_lib_layer_number=5
|
||||
+ bad_app1_layer_number=4
|
||||
+ bad_app2_layer_number=3
|
||||
+ lib_layer_number=5
|
||||
+ app1_layer_number=6
|
||||
+ app2_layer_number=7
|
||||
+
|
||||
+ # build bad dockerfile
|
||||
+ touch_bad_dockerfile "${base_image_name}" "${bad_lib_image_name}" "${bad_lib_layer_number}" "${dockerfile}"
|
||||
+ build_image "${bad_lib_image_name}" "${workspace}"
|
||||
+ touch_bad_dockerfile "${bad_lib_image_name}" "${bad_app1_image_name}" "${bad_app1_layer_number}" "${dockerfile}"
|
||||
+ build_image "${bad_app1_image_name}" "${workspace}"
|
||||
+ touch_bad_dockerfile "${bad_lib_image_name}" "${bad_app2_image_name}" "${bad_app2_layer_number}" "${dockerfile}"
|
||||
+ build_image "${bad_app2_image_name}" "${workspace}"
|
||||
+
|
||||
+ # build normal dockerfile
|
||||
+ touch_dockerfile "${base_image_name}" "${lib_image_name}" "${lib_layer_number}" "${dockerfile}"
|
||||
+ build_image "${lib_image_name}" "${workspace}"
|
||||
+ touch_dockerfile "${lib_image_name}" "${app1_image_name}" "${app1_layer_number}" "${dockerfile}"
|
||||
+ build_image "${app1_image_name}" "${workspace}"
|
||||
+ touch_dockerfile "${lib_image_name}" "${app2_image_name}" "${app2_layer_number}" "${dockerfile}"
|
||||
+ build_image "${app2_image_name}" "${workspace}"
|
||||
+}
|
||||
+
|
||||
+function test_run1() {
|
||||
+ isula-build ctr-img save -b "${base_image_name}" -l "${bad_lib_image_name}" -d "${workspace}"/Images "${bad_app1_image_name}" "${bad_app2_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ rm -rf "${workspace}"/Images
|
||||
+}
|
||||
+
|
||||
+function test_run2() {
|
||||
+ isula-build ctr-img save -b "invalid:base" -l "${bad_lib_image_name}" -d "${workspace}"/Images "${bad_app1_image_name}" "${bad_app2_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ rm -rf "${workspace}"/Images
|
||||
+}
|
||||
+
|
||||
+function test_run3() {
|
||||
+ isula-build ctr-img save -b "${base_image_name}" -l "invalid:lib" -d "${workspace}"/Images "${bad_app1_image_name}" "${bad_app2_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ rm -rf "${workspace}"/Images
|
||||
+}
|
||||
+
|
||||
+function test_run4() {
|
||||
+ isula-build ctr-img save -b "${base_image_name}" -l "${bad_lib_image_name}" -d "${workspace}"/Images "invalid:app" "${bad_app2_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ rm -rf "${workspace}"/Images
|
||||
+}
|
||||
+
|
||||
+function test_run5() {
|
||||
+ isula-build ctr-img save -b "${base_image_name}" -l "${bad_lib_image_name}" -d "${workspace}"/Images "${app1_image_name}" "${app2_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ rm -rf "${workspace}"/Images
|
||||
+}
|
||||
+
|
||||
+function test_run6() {
|
||||
+ isula-build ctr-img save -b "${base_image_name}" -l "${lib_image_name}" -d "${workspace}"/Images "${bad_app1_image_name}" "${bad_app2_image_name}"
|
||||
+ check_result_not_equal $? 0
|
||||
+ rm -rf "${workspace}"/Images
|
||||
+}
|
||||
+
|
||||
+function cleanup() {
|
||||
+ rm -rf "${workspace}"
|
||||
+ isula-build ctr-img rm "${bad_lib_image_name}" "${bad_app1_image_name}" "${bad_app2_image_name}" "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+ isula-build ctr-img rm -p
|
||||
+}
|
||||
+
|
||||
+pre_run
|
||||
+test_run1
|
||||
+test_run2
|
||||
+test_run3
|
||||
+test_run4
|
||||
+test_run5
|
||||
+test_run6
|
||||
+cleanup
|
||||
+# shellcheck disable=SC2154
|
||||
+exit "${exit_flag}"
|
||||
diff --git a/tests/src/cover_test_save_separated_image_success.sh b/tests/src/cover_test_save_separated_image_success.sh
|
||||
new file mode 100644
|
||||
index 00000000..2095bd33
|
||||
--- /dev/null
|
||||
+++ b/tests/src/cover_test_save_separated_image_success.sh
|
||||
@@ -0,0 +1,54 @@
|
||||
+#!/bin/bash
|
||||
+
|
||||
+# Copyright (c) Huawei Technologies Co., Ltd. 2020. All rights reserved.
|
||||
+# isula-build licensed under the Mulan PSL v2.
|
||||
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
|
||||
+# You may obtain a copy of Mulan PSL v2 at:
|
||||
+# http://license.coscl.org.cn/MulanPSL2
|
||||
+# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
|
||||
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
|
||||
+# PURPOSE.
|
||||
+# See the Mulan PSL v2 for more details.
|
||||
+# Author: Xiang Li
|
||||
+# Create: 2021-11-01
|
||||
+# Description: cover test for save separated image
|
||||
+
|
||||
+test_name=${BASH_SOURCE[0]}
|
||||
+workspace=/tmp/${test_name}.$(date +%s)
|
||||
+mkdir -p "${workspace}"
|
||||
+dockerfile=${workspace}/Dockerfile
|
||||
+top_dir=$(git rev-parse --show-toplevel)
|
||||
+# shellcheck disable=SC1091
|
||||
+source "${top_dir}"/tests/lib/separator.sh
|
||||
+
|
||||
+function pre_run() {
|
||||
+ base_image_name="hub.oepkgs.net/library/busybox:latest"
|
||||
+ lib_image_name="lib:latest"
|
||||
+ app1_image_name="app1:latest"
|
||||
+ app2_image_name="app2:latest"
|
||||
+ lib_layer_number=5
|
||||
+ app1_layer_number=4
|
||||
+ app2_layer_number=3
|
||||
+ touch_dockerfile "${base_image_name}" "${lib_image_name}" "${lib_layer_number}" "${dockerfile}"
|
||||
+ build_image "${lib_image_name}" "${workspace}"
|
||||
+ touch_dockerfile "${lib_image_name}" "${app1_image_name}" "${app1_layer_number}" "${dockerfile}"
|
||||
+ build_image "${app1_image_name}" "${workspace}"
|
||||
+ touch_dockerfile "${lib_image_name}" "${app2_image_name}" "${app2_layer_number}" "${dockerfile}"
|
||||
+ build_image "${app2_image_name}" "${workspace}"
|
||||
+}
|
||||
+
|
||||
+function test_run() {
|
||||
+ isula-build ctr-img save -b "${base_image_name}" -l "${lib_image_name}" -d "${workspace}"/Images "${app1_image_name}" "${app2_image_name}"
|
||||
+ check_result_equal $? 0
|
||||
+}
|
||||
+
|
||||
+function cleanup() {
|
||||
+ rm -rf "${workspace}"
|
||||
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
|
||||
+}
|
||||
+
|
||||
+pre_run
|
||||
+test_run
|
||||
+cleanup
|
||||
+# shellcheck disable=SC2154
|
||||
+exit "${exit_flag}"
|
||||
diff --git a/tests/src/cover_test_save_separated_image_with_rename.sh b/tests/src/cover_test_save_separated_image_with_rename.sh
new file mode 100644
index 00000000..28904757
--- /dev/null
+++ b/tests/src/cover_test_save_separated_image_with_rename.sh
@ -0,0 +1,75 @@
+#!/bin/bash
+
+# Copyright (c) Huawei Technologies Co., Ltd. 2020. All rights reserved.
+# isula-build licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# Author: Xiang Li
+# Create: 2021-11-01
+# Description: cover test for save separated image
+
+test_name=${BASH_SOURCE[0]}
+workspace=/tmp/${test_name}.$(date +%s)
+mkdir -p "${workspace}"
+dockerfile=${workspace}/Dockerfile
+rename_json=${workspace}/rename.json
+invalid_rename_json=${workspace}/invalid.json
+none_exist_rename_json=${workspace}/none_exist.json
+top_dir=$(git rev-parse --show-toplevel)
+# shellcheck disable=SC1091
+source "${top_dir}"/tests/lib/separator.sh
+
+function pre_run() {
+ base_image_name="hub.oepkgs.net/library/busybox:latest"
+ lib_image_name="lib:latest"
+ app1_image_name="app1:latest"
+ app2_image_name="app2:latest"
+ lib_layer_number=5
+ app1_layer_number=4
+ app2_layer_number=3
+ touch_dockerfile "${base_image_name}" "${lib_image_name}" "${lib_layer_number}" "${dockerfile}"
+ build_image "${lib_image_name}" "${workspace}"
+ touch_dockerfile "${lib_image_name}" "${app1_image_name}" "${app1_layer_number}" "${dockerfile}"
+ build_image "${app1_image_name}" "${workspace}"
+ touch_dockerfile "${lib_image_name}" "${app2_image_name}" "${app2_layer_number}" "${dockerfile}"
+ build_image "${app2_image_name}" "${workspace}"
+}
+
+function test_run1() {
+ touch_rename_json "${rename_json}"
+ isula-build ctr-img save -b "${base_image_name}" -l "${lib_image_name}" -d "${workspace}"/Images -r "${rename_json}" "${app1_image_name}" "${app2_image_name}"
+ check_result_equal $? 0
+ rm -rf "${workspace}"/Images
+}
+
+function test_run2() {
+ touch_bad_rename_json "${invalid_rename_json}"
+ isula-build ctr-img save -b "${base_image_name}" -l "${lib_image_name}" -d "${workspace}"/Images -r "${invalid_rename_json}" "${app1_image_name}" "${app2_image_name}"
+ check_result_not_equal $? 0
+ rm -rf "${workspace}"/Images
+}
+
+function test_run3() {
+ isula-build ctr-img save -b "${base_image_name}" -l "${lib_image_name}" -d "${workspace}"/Images -r "${none_exist_rename_json}" "${app1_image_name}" "${app2_image_name}"
+ check_result_not_equal $? 0
+ rm -rf "${workspace}"/Images
+}
+
+function cleanup() {
+ rm -rf "${workspace}"
+ isula-build ctr-img rm "${lib_image_name}" "${app1_image_name}" "${app2_image_name}"
+}
+
+
+pre_run
+test_run1
+test_run2
+test_run3
+cleanup
+# shellcheck disable=SC2154
+exit "${exit_flag}"
--
2.27.0

463
patch/0085-util-add-unit-test-for-increment-util-functions.patch
Normal file
@ -0,0 +1,463 @@
From 133e789d445905f5d94a6c8cc3459b3729fb7335 Mon Sep 17 00:00:00 2001
From: DCCooper <1866858@gmail.com>
Date: Thu, 28 Oct 2021 18:55:24 +0800
Subject: [PATCH 07/16] util: add unit test for increment util functions

Signed-off-by: DCCooper <1866858@gmail.com>
---
 util/cipher.go | 6 +-
 util/cipher_test.go | 237 ++++++++++++++++++++++++++++++++++++++++++++
 util/common.go | 3 +-
 util/common_test.go | 131 ++++++++++++++++++++++++
 4 files changed, 373 insertions(+), 4 deletions(-)

diff --git a/util/cipher.go b/util/cipher.go
index d92705c3..ce47b71e 100644
--- a/util/cipher.go
+++ b/util/cipher.go
@@ -234,9 +234,6 @@ func ReadPublicKey(path string) (rsa.PublicKey, error) {

func hashFile(path string) (string, error) {
cleanPath := filepath.Clean(path)
- if len(cleanPath) == 0 {
- return "", errors.New("failed to hash empty path")
- }
if f, err := os.Stat(cleanPath); err != nil {
return "", errors.Errorf("failed to stat file %q", cleanPath)
} else if f.IsDir() {
@@ -282,6 +279,9 @@ func hashDir(path string) (string, error) {
// the checksum will be concatenated to next checksum until every file
// counted, the result will be used for final checksum calculation
func SHA256Sum(path string) (string, error) {
+ if len(path) == 0 {
+ return "", errors.New("failed to hash empty path")
+ }
path = filepath.Clean(path)
f, err := os.Stat(path)
if err != nil {
diff --git a/util/cipher_test.go b/util/cipher_test.go
index 1c0d21c9..bab6dfe3 100644
--- a/util/cipher_test.go
+++ b/util/cipher_test.go
@@ -19,12 +19,15 @@ import (
"crypto/sha256"
"crypto/sha512"
"hash"
+ "io/ioutil"
+ "os"
"path/filepath"
"strings"
"testing"

"gotest.tools/v3/assert"
"gotest.tools/v3/fs"
+ constant "isula.org/isula-build"
)

const (
@@ -216,3 +219,237 @@ func benchmarkGenerateRSAKey(scale int, b *testing.B) {
func BenchmarkGenerateRSAKey2048(b *testing.B) { benchmarkGenerateRSAKey(2048, b) }
func BenchmarkGenerateRSAKey3072(b *testing.B) { benchmarkGenerateRSAKey(3072, b) }
func BenchmarkGenerateRSAKey4096(b *testing.B) { benchmarkGenerateRSAKey(4096, b) }
+
+func TestHashFile(t *testing.T) {
+ emptyFile := fs.NewFile(t, t.Name())
+ defer emptyFile.Remove()
+ fileWithContent := fs.NewFile(t, t.Name())
+ err := ioutil.WriteFile(fileWithContent.Path(), []byte("hello"), constant.DefaultRootFileMode)
+ assert.NilError(t, err)
+ defer fileWithContent.Remove()
+ dir := fs.NewDir(t, t.Name())
+ defer dir.Remove()
+
+ type args struct {
+ path string
+ }
+ tests := []struct {
+ name string
+ args args
+ want string
+ wantErr bool
+ }{
+ {
+ name: "TC-hash empty file",
+ args: args{path: emptyFile.Path()},
+ // empty file sha256sum always is
+ want: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ },
+ {
+ name: "TC-hash file with content",
+ args: args{path: fileWithContent.Path()},
+ want: "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824",
+ },
+ {
+ name: "TC-hash file with empty path",
+ wantErr: true,
+ },
+ {
+ name: "TC-hash file with invalid path",
+ args: args{path: "path not exist"},
+ wantErr: true,
+ },
+ {
+ name: "TC-hash file with directory path",
+ args: args{path: dir.Path()},
+ wantErr: true,
+ },
+ {
+ name: "TC-hash file with special device",
+ args: args{path: "/dev/cdrom"},
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, err := hashFile(tt.args.path)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("hashFile() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if got != tt.want {
+ t.Errorf("hashFile() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestHashDir(t *testing.T) {
+ root := fs.NewDir(t, t.Name())
+ defer root.Remove()
+
+ rootSub1 := root.Join("sub1")
+ os.MkdirAll(rootSub1, constant.DefaultRootDirMode)
+ defer os.RemoveAll(rootSub1)
+ rootSub1File := filepath.Join(rootSub1, "rootSub1File")
+ ioutil.WriteFile(rootSub1File, []byte("hello1"), constant.DefaultRootFileMode)
+ defer os.RemoveAll(rootSub1File)
+
+ rootSub11 := filepath.Join(rootSub1, "sub11")
+ os.MkdirAll(rootSub11, constant.DefaultRootDirMode)
+ defer os.RemoveAll(rootSub11)
+ rootSub11File := filepath.Join(rootSub11, "rootSub11File")
+ ioutil.WriteFile(rootSub11File, []byte("hello11"), constant.DefaultRootFileMode)
+ defer os.RemoveAll(rootSub11File)
+
+ emptyDir := fs.NewDir(t, t.Name())
+ defer emptyDir.Remove()
+ emptyFile := root.Join("empty.tar")
+ _, err := os.Create(emptyFile)
+ assert.NilError(t, err)
+ defer os.RemoveAll(emptyFile)
+
+ type args struct {
+ path string
+ }
+ tests := []struct {
+ name string
+ args args
+ want string
+ wantErr bool
+ }{
+ {
+ name: "TC-hash empty dir",
+ args: args{path: emptyDir.Path()},
+ want: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ },
+ {
+ name: "TC-hash not exist dir",
+ args: args{path: "path not exist"},
+ wantErr: true,
+ },
+ {
+ name: "TC-hash multiple dirs",
+ args: args{path: root.Path()},
+ want: "bdaaa88766b974876a14d85620b5a26795735c332445783a3a067e0052a59478",
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, err := hashDir(tt.args.path)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("hashDir() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if got != tt.want {
+ t.Errorf("hashDir() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestSHA256Sum(t *testing.T) {
+ root := fs.NewDir(t, t.Name())
+ defer root.Remove()
+
+ rootSub1 := root.Join("sub1")
+ os.MkdirAll(rootSub1, constant.DefaultRootDirMode)
+ defer os.RemoveAll(rootSub1)
+ rootSub1File := filepath.Join(rootSub1, "rootSub1File")
+ ioutil.WriteFile(rootSub1File, []byte("hello1"), constant.DefaultRootFileMode)
+ defer os.RemoveAll(rootSub1File)
+
+ emptyDir := fs.NewDir(t, t.Name())
+ defer emptyDir.Remove()
+ emptyFile := root.Join("empty.tar")
+ _, err := os.Create(emptyFile)
+ assert.NilError(t, err)
+ defer os.RemoveAll(emptyFile)
+
+ type args struct {
+ path string
+ }
+ tests := []struct {
+ name string
+ args args
+ want string
+ wantErr bool
+ }{
+ {
+ name: "TC-for dir",
+ args: args{path: root.Path()},
+ want: "6a29015d578de92eabad6b20b3e3c0d4df521b03728cb4ee5667b15742154646",
+ },
+ {
+ name: "TC-for file only",
+ args: args{path: emptyFile},
+ want: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ },
+ {
+ name: "TC-for invalid file",
+ args: args{path: "/dev/cdrom"},
+ wantErr: true,
+ },
+ {
+ name: "TC-for path not exist",
+ args: args{path: "path not exist"},
+ wantErr: true,
+ },
+ {
+ name: "TC-for empty path",
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, err := SHA256Sum(tt.args.path)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("SHA256Sum() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if got != tt.want {
+ t.Errorf("SHA256Sum() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestCheckSum(t *testing.T) {
+ emptyFile := fs.NewFile(t, t.Name())
+ defer emptyFile.Remove()
+
+ type args struct {
+ path string
+ target string
+ }
+ tests := []struct {
+ name string
+ args args
+ wantErr bool
+ }{
+ {
+ name: "TC-normal case",
+ args: args{
+ path: emptyFile.Path(),
+ target: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ },
+ },
+ {
+ name: "TC-check sum failed",
+ args: args{path: emptyFile.Path(), target: "wrong"},
+ wantErr: true,
+ },
+ {
+ name: "TC-empty path",
+ args: args{target: "wrong"},
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if err := CheckSum(tt.args.path, tt.args.target); (err != nil) != tt.wantErr {
+ t.Errorf("CheckSum() error = %v, wantErr %v", err, tt.wantErr)
+ }
+ })
+ }
+}
diff --git a/util/common.go b/util/common.go
index 4782b2ec..ff85da9c 100644
--- a/util/common.go
+++ b/util/common.go
@@ -192,7 +192,8 @@ func IsValidImageName(name string) bool {
if err != nil {
return false
}
- if _, canonical := ref.(reference.Canonical); canonical {
+
+ if _, isDigest := ref.(reference.Canonical); isDigest {
return false
}
return true
diff --git a/util/common_test.go b/util/common_test.go
index ed9edf6e..9831971a 100644
--- a/util/common_test.go
+++ b/util/common_test.go
@@ -14,11 +14,14 @@
package util

import (
+ "io/ioutil"
+ "os"
"path/filepath"
"testing"

"gotest.tools/v3/assert"
"gotest.tools/v3/fs"
+ constant "isula.org/isula-build"
)

func TestCheckFileSize(t *testing.T) {
@@ -179,3 +182,131 @@ func TestParseServer(t *testing.T) {
})
}
}
+
+func TestIsValidImageName(t *testing.T) {
+ type args struct {
+ name string
+ }
+ tests := []struct {
+ name string
+ args args
+ want bool
+ }{
+ {
+ name: "TC-valid image name",
+ args: args{name: "app:latest"},
+ want: true,
+ },
+ {
+ name: "TC-valid image name with domain",
+ args: args{name: "localhost:5000/app:latest"},
+ want: true,
+ },
+ {
+ name: "TC-invalid image name",
+ args: args{name: "app:latest:v1"},
+ want: false,
+ },
+ {
+ name: "TC-invalid image name with canonical format",
+ args: args{name: "alpine:3.2@sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72"},
+ want: false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := IsValidImageName(tt.args.name); got != tt.want {
+ t.Errorf("IsValidImageName() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestAnyFlagSet(t *testing.T) {
+ type args struct {
+ flags []string
+ }
+ tests := []struct {
+ name string
+ args args
+ want bool
+ }{
+ {
+ name: "TC-some flag set",
+ args: args{flags: []string{"flag1", "flag2"}},
+ want: true,
+ },
+ {
+ name: "TC-none flag set",
+ args: args{flags: []string{}},
+ want: false,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if got := AnyFlagSet(tt.args.flags...); got != tt.want {
+ t.Errorf("AnyFlagSet() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestCheckLoadFile(t *testing.T) {
+ loadFile := fs.NewFile(t, t.Name())
+ defer loadFile.Remove()
+ err := ioutil.WriteFile(loadFile.Path(), []byte("hello"), constant.DefaultRootFileMode)
+ assert.NilError(t, err)
+
+ emptyFile := fs.NewFile(t, t.Name())
+ defer emptyFile.Remove()
+
+ root := fs.NewDir(t, t.Name())
+ defer root.Remove()
+
+ bigFile := filepath.Join(root.Path(), "bigFile")
+ f, err := os.Create(bigFile)
+ assert.NilError(t, err)
+ defer os.Remove(f.Name())
+ err = f.Truncate(maxLoadFileSize + 1)
+ assert.NilError(t, err)
+
+ type args struct {
+ path string
+ }
+ tests := []struct {
+ name string
+ args args
+ wantErr bool
+ }{
+ {
+ name: "TC-normal load file",
+ args: args{path: loadFile.Path()},
+ },
+ {
+ name: "TC-load file not exist",
+ wantErr: true,
+ },
+ {
+ name: "TC-empty load file",
+ args: args{path: emptyFile.Path()},
+ wantErr: true,
+ },
+ {
+ name: "TC-invalid load file",
+ args: args{path: "/dev/cdrom"},
+ wantErr: true,
+ },
+ {
+ name: "TC-load file too big",
+ args: args{path: bigFile},
+ wantErr: true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if err := CheckLoadFile(tt.args.path); (err != nil) != tt.wantErr {
+ t.Errorf("CheckLoadFile() error = %v, wantErr %v", err, tt.wantErr)
+ }
+ })
+ }
+}
--
2.27.0
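For context on the helpers covered by this unit-test patch: SHA256Sum hashes a single file or a whole directory tree and CheckSum compares a path against an expected digest, presumably the integrity check applied to separated-image artifacts. The following is a minimal standalone usage sketch and not part of the patch series; the "isula.org/isula-build/util" import path and the tarball location are assumptions for illustration only, while the SHA256Sum and CheckSum signatures come from the tests above.

package main

import (
	"fmt"

	"isula.org/isula-build/util"
)

func main() {
	// Hash a saved tarball; SHA256Sum also walks directories and rejects an empty path.
	sum, err := util.SHA256Sum("/var/tmp/Images/app1.tar.gz")
	if err != nil {
		fmt.Println("hash failed:", err)
		return
	}
	// Re-hash the same path and compare it with the expected digest.
	if err := util.CheckSum("/var/tmp/Images/app1.tar.gz", sum); err != nil {
		fmt.Println("checksum mismatch:", err)
		return
	}
	fmt.Println("checksum verified:", sum)
}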

14
series.conf
@ -34,3 +34,17 @@ patch/0068-change-golangci-lint-config-and-remove-redundant-che.patch
patch/0069-make-add-make-info-for-Makefile.patch
patch/0070-clean-code-all-latest-tag-checks-take-the-FindImage-.patch
patch/0071-use-image.GetNamedTaggedReference-instead-of-dockerf.patch
patch/0072-protocol-define-separator-protocol.patch
patch/0073-cli-finish-client-save-separated-image.patch
patch/0074-daemon-finish-daemon-save-separated-image.patch
patch/0075-cli-finish-client-load-separated-image.patch
patch/0076-daemon-finish-daemon-load-separated-image.patch
patch/0077-test-optimize-save-client-options-and-add-unit-test.patch
patch/0078-test-optimize-load-client-options-and-add-unit-test.patch
patch/0079-bugfix-fix-when-load-separated-image-error-return.patch
patch/0080-util-add-unit-test-for-file.go.patch
patch/0081-test-cleancode-test-for-better-experience.patch
patch/0082-test-optimize-scripts-in-hack.patch
patch/0083-test-add-common-function-for-testing-separated-image.patch
patch/0084-test-add-integration-tests-for-saving-and-loading-se.patch
patch/0085-util-add-unit-test-for-increment-util-functions.patch