diff --git a/api/analysis.go b/api/analysis.go index 8350fe3a..43dd0d89 100644 --- a/api/analysis.go +++ b/api/analysis.go @@ -26,10 +26,14 @@ import ( const ( AnalysesRoot = "/analyses" AnalysisRoot = AnalysesRoot + "/:" + ID + AnalysisArchiveRoot = AnalysisRoot + "/archive" + AnalysisIssuesRoot = AnalysisRoot + "/issues" + AnalysisIncidentsRoot = AnalysesIssueRoot + "/incidents" AnalysesDepsRoot = AnalysesRoot + "/dependencies" AnalysesIssuesRoot = AnalysesRoot + "/issues" AnalysesIssueRoot = AnalysesIssuesRoot + "/:" + ID - AnalysisIncidentsRoot = AnalysesIssueRoot + "/incidents" + AnalysesIncidentsRoot = AnalysesRoot + "/incidents" + AnalysesIncidentRoot = AnalysesIncidentsRoot + "/:" + ID // AnalysesReportRoot = AnalysesRoot + "/report" AnalysisReportDepsRoot = AnalysesReportRoot + "/dependencies" @@ -65,11 +69,17 @@ func (h AnalysisHandler) AddRoutes(e *gin.Engine) { routeGroup := e.Group("/") routeGroup.Use(Required("analyses")) routeGroup.GET(AnalysisRoot, h.Get) + routeGroup.POST(AnalysisArchiveRoot, h.Archive) + routeGroup.GET(AnalysesRoot, h.List) routeGroup.DELETE(AnalysisRoot, h.Delete) routeGroup.GET(AnalysesDepsRoot, h.Deps) routeGroup.GET(AnalysesIssuesRoot, h.Issues) routeGroup.GET(AnalysesIssueRoot, h.Issue) - routeGroup.GET(AnalysisIncidentsRoot, h.Incidents) + routeGroup.GET(AnalysesIncidentsRoot, h.Incidents) + routeGroup.GET(AnalysesIncidentRoot, h.Incident) + routeGroup.GET(AnalysisIssuesRoot, h.AnalysisIssues) + routeGroup.GET(AnalysisIncidentsRoot, h.IssueIncidents) + // Report routeGroup.GET(AnalysisReportRuleRoot, h.RuleReports) routeGroup.GET(AnalysisReportAppsIssuesRoot, h.AppIssueReports) routeGroup.GET(AnalysisReportIssuesAppsRoot, h.IssueAppReports) @@ -110,6 +120,64 @@ func (h AnalysisHandler) Get(ctx *gin.Context) { ctx.File(path) } +// List godoc +// @summary List analyses. +// @description List analyses. +// @description Resources do not include relations. +// @tags analyses +// @produce json +// @success 200 {object} []api.Analysis +// @router /analyses [get] +func (h AnalysisHandler) List(ctx *gin.Context) { + resources := []Analysis{} + filter, err := qf.New(ctx, + []qf.Assert{ + {Field: "id", Kind: qf.LITERAL}, + }) + if err != nil { + _ = ctx.Error(err) + return + } + sort := Sort{} + err = sort.With(ctx, &model.Analysis{}) + if err != nil { + _ = ctx.Error(err) + return + } + // Find + db := h.DB(ctx) + db = db.Model(&model.Analysis{}) + db = db.Preload("Application") + db = db.Omit("Summary") + db = filter.Where(db) + db = sort.Sorted(db) + var list []model.Analysis + var m model.Analysis + page := Page{} + page.With(ctx) + cursor := Cursor{} + cursor.With(db, page) + defer func() { + cursor.Close() + }() + for cursor.Next(&m) { + if cursor.Error != nil { + _ = ctx.Error(cursor.Error) + return + } + list = append(list, m) + } + // Render + for i := range list { + m := &list[i] + r := Analysis{} + r.With(m) + resources = append(resources, r) + } + + h.Respond(ctx, http.StatusOK, resources) +} + // AppLatest godoc // @summary Get the latest analysis. // @description Get the latest analysis for an application. @@ -141,6 +209,32 @@ func (h AnalysisHandler) AppLatest(ctx *gin.Context) { ctx.File(path) } +// Archive godoc +// @summary Archive an analysis (report) by ID. +// @description Archive an analysis (report) by ID. 
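+//
+// A request sketch (the base URL and ID are illustrative):
+//
+//	resp, err := http.Post("http://hub.local/analyses/42/archive", "", nil)
+//	if err == nil && resp.StatusCode == http.StatusNoContent {
+//		// archived; issues and dependencies were summarized and deleted
+//	}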
+// @tags analyses +// @produce octet-stream +// @success 204 {object} +// @router /analyses/{id}/archive [post] +// @param id path int true "Analysis ID" +func (h AnalysisHandler) Archive(ctx *gin.Context) { + id := h.pk(ctx) + m := &model.Analysis{} + db := h.DB(ctx).Select(ID) + err := db.First(m, id).Error + if err != nil { + _ = ctx.Error(err) + return + } + err = h.archiveById(ctx) + if err != nil { + _ = ctx.Error(err) + return + } + + h.Status(ctx, http.StatusNoContent) +} + // AppLatestReport godoc // @summary Get the latest analysis (static) report. // @description Get the latest analysis (static) report. @@ -185,6 +279,8 @@ func (h AnalysisHandler) AppList(ctx *gin.Context) { db := h.DB(ctx) db = db.Model(&model.Analysis{}) db = db.Where("ApplicationID = ?", id) + db = db.Preload("Application") + db = db.Omit("Summary") db = sort.Sorted(db) var list []model.Analysis var m model.Analysis @@ -202,15 +298,11 @@ func (h AnalysisHandler) AppList(ctx *gin.Context) { } list = append(list, m) } - err = h.WithCount(ctx, cursor.Count()) - if err != nil { - _ = ctx.Error(err) - return - } // Render for i := range list { + m := &list[i] r := Analysis{} - r.With(&list[i]) + r.With(m) resources = append(resources, r) } @@ -236,20 +328,12 @@ func (h AnalysisHandler) AppCreate(ctx *gin.Context) { _ = ctx.Error(result.Error) return } - err := h.archive(ctx) - if err != nil { - _ = ctx.Error(err) - return - } - analysis := &model.Analysis{} - analysis.ApplicationID = id - analysis.CreateUser = h.BaseHandler.CurrentUser(ctx) - db := h.DB(ctx) - db.Logger = db.Logger.LogMode(logger.Error) - err = db.Create(analysis).Error - if err != nil { - _ = ctx.Error(err) - return + if Settings.Analysis.ArchiverEnabled { + err := h.archiveByApp(ctx) + if err != nil { + _ = ctx.Error(err) + return + } } // // Analysis @@ -282,6 +366,16 @@ func (h AnalysisHandler) AppCreate(ctx *gin.Context) { _ = ctx.Error(err) return } + analysis := r.Model() + analysis.ApplicationID = id + analysis.CreateUser = h.BaseHandler.CurrentUser(ctx) + db := h.DB(ctx) + db.Logger = db.Logger.LogMode(logger.Error) + err = db.Create(analysis).Error + if err != nil { + _ = ctx.Error(err) + return + } // // Issues input, err = ctx.FormFile(IssueField) @@ -388,7 +482,7 @@ func (h AnalysisHandler) AppCreate(ctx *gin.Context) { } db = h.DB(ctx) - db = db.Preload(clause.Associations) + db = db.Preload("Application") err = db.First(analysis).Error if err != nil { _ = ctx.Error(err) @@ -521,7 +615,6 @@ func (h AnalysisHandler) AppDeps(ctx *gin.Context) { // @router /application/{id}/analysis/issues [get] // @param id path int true "Application ID" func (h AnalysisHandler) AppIssues(ctx *gin.Context) { - resources := []Issue{} // Latest id := h.pk(ctx) analysis := &model.Analysis{} @@ -545,49 +638,23 @@ func (h AnalysisHandler) AppIssues(ctx *gin.Context) { _ = ctx.Error(err) return } - // Sort - sort := Sort{} - err = sort.With(ctx, &model.Issue{}) + // Render + writer := IssueWriter{ctx: ctx} + path, count, err := writer.Create(analysis.ID, filter) if err != nil { _ = ctx.Error(err) return } - // Find - db = h.DB(ctx) - db = db.Model(&model.Issue{}) - db = db.Where("AnalysisID = ?", analysis.ID) - db = db.Where("ID IN (?)", h.issueIDs(ctx, filter)) - db = sort.Sorted(db) - var list []model.Issue - var m model.Issue - page := Page{} - page.With(ctx) - cursor := Cursor{} - cursor.With(db, page) defer func() { - cursor.Close() + _ = os.Remove(path) }() - for cursor.Next(&m) { - if cursor.Error != nil { - _ = ctx.Error(cursor.Error) - return - } - 
list = append(list, m) - } - err = h.WithCount(ctx, cursor.Count()) + err = h.WithCount(ctx, count) if err != nil { _ = ctx.Error(err) return } - // Render - for i := range list { - m := &list[i] - r := Issue{} - r.With(m) - resources = append(resources, r) - } - - h.Respond(ctx, http.StatusOK, resources) + h.Status(ctx, http.StatusOK) + ctx.File(path) } // Issues godoc @@ -608,7 +675,6 @@ func (h AnalysisHandler) AppIssues(ctx *gin.Context) { // @success 200 {object} []api.Issue // @router /analyses/issues [get] func (h AnalysisHandler) Issues(ctx *gin.Context) { - resources := []Issue{} // Filter filter, err := qf.New(ctx, []qf.Assert{ @@ -626,52 +692,73 @@ func (h AnalysisHandler) Issues(ctx *gin.Context) { _ = ctx.Error(err) return } - // Sort - sort := Sort{} - err = sort.With(ctx, &model.Issue{}) + // Render + writer := IssueWriter{ctx: ctx} + path, count, err := writer.Create(0, filter) if err != nil { _ = ctx.Error(err) return } - // Find - db := h.DB(ctx) - db = db.Table("Issue i") - db = db.Joins(",Analysis a") - db = db.Where("a.ID = i.AnalysisID") - db = db.Where("a.ID IN (?)", h.analysisIDs(ctx, filter)) - db = db.Where("i.ID IN (?)", h.issueIDs(ctx, filter)) - db = db.Group("i.ID") - db = sort.Sorted(db) - var list []model.Issue - var m model.Issue - page := Page{} - page.With(ctx) - cursor := Cursor{} - cursor.With(db, page) defer func() { - cursor.Close() + _ = os.Remove(path) }() - for cursor.Next(&m) { - if cursor.Error != nil { - _ = ctx.Error(cursor.Error) - return - } - list = append(list, m) + err = h.WithCount(ctx, count) + if err != nil { + _ = ctx.Error(err) + return } - err = h.WithCount(ctx, cursor.Count()) + h.Status(ctx, http.StatusOK) + ctx.File(path) +} + +// AnalysisIssues godoc +// @summary List issues for an analysis. +// @description List issues for an analysis. +// @description filters: +// @description - ruleset +// @description - rule +// @description - name +// @description - category +// @description - effort +// @description - labels +// @tags issues +// @produce json +// @success 200 {object} []api.Issue +// @router /analyses/{id}/issues [get] +// @param id path int true "Analysis ID" +func (h AnalysisHandler) AnalysisIssues(ctx *gin.Context) { + // Filter + filter, err := qf.New(ctx, + []qf.Assert{ + {Field: "ruleset", Kind: qf.STRING}, + {Field: "rule", Kind: qf.STRING}, + {Field: "name", Kind: qf.STRING}, + {Field: "category", Kind: qf.STRING}, + {Field: "effort", Kind: qf.LITERAL}, + {Field: "labels", Kind: qf.STRING, And: true}, + }) if err != nil { _ = ctx.Error(err) return } // Render - for i := range list { - m := &list[i] - r := Issue{} - r.With(m) - resources = append(resources, r) + id := h.pk(ctx) + writer := IssueWriter{ctx: ctx} + path, count, err := writer.Create(id, filter) + if err != nil { + _ = ctx.Error(err) + return } - - h.Respond(ctx, http.StatusOK, resources) + defer func() { + _ = os.Remove(path) + }() + err = h.WithCount(ctx, count) + if err != nil { + _ = ctx.Error(err) + return + } + h.Status(ctx, http.StatusOK) + ctx.File(path) } // Issue godoc @@ -699,6 +786,74 @@ func (h AnalysisHandler) Issue(ctx *gin.Context) { } // Incidents godoc +// @summary List all incidents. +// @description List all incidents. 
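+//
+// Request sketch (the paging parameter names are assumptions):
+//
+//	GET /analyses/incidents?offset=0&limit=100
+//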
+// @description filters:
+// @description - file
+// @description - issue.id
+// @tags incidents
+// @produce json
+// @success 200 {object} []api.Incident
+// @router /analyses/incidents [get]
+func (h AnalysisHandler) Incidents(ctx *gin.Context) {
+	// Filter
+	filter, err := qf.New(ctx,
+		[]qf.Assert{
+			{Field: "file", Kind: qf.STRING},
+			{Field: "issue.id", Kind: qf.STRING},
+		})
+	if err != nil {
+		_ = ctx.Error(err)
+		return
+	}
+	filter = filter.Renamed("issue.id", "issueid")
+	// Sort
+	sort := Sort{}
+	err = sort.With(ctx, &model.Incident{})
+	if err != nil {
+		_ = ctx.Error(err)
+		return
+	}
+	// Find
+	db := h.DB(ctx)
+	db = db.Model(&model.Incident{})
+	db = filter.Where(db)
+	db = sort.Sorted(db)
+	var list []model.Incident
+	var m model.Incident
+	cursor := Cursor{}
+	defer func() {
+		cursor.Close()
+	}()
+	page := Page{}
+	page.With(ctx)
+	cursor.With(db, page)
+	for cursor.Next(&m) {
+		if cursor.Error != nil {
+			_ = ctx.Error(cursor.Error)
+			return
+		}
+		list = append(list, m)
+	}
+	err = h.WithCount(ctx, cursor.Count())
+	if err != nil {
+		_ = ctx.Error(err)
+		return
+	}
+	// Render
+	resources := []Incident{}
+	for _, m := range list {
+		r := Incident{}
+		r.With(&m)
+		resources = append(
+			resources,
+			r)
+	}
+
+	h.Respond(ctx, http.StatusOK, resources)
+}
+
+// IssueIncidents godoc
 // @summary List incidents for an issue.
 // @description List incidents for an issue.
 // @description filters:
@@ -708,7 +863,7 @@ func (h AnalysisHandler) Issue(ctx *gin.Context) {
 // @success 200 {object} []api.Incident
 // @router /analyses/issues/{id}/incidents [get]
 // @param id path int true "Issue ID"
-func (h AnalysisHandler) Incidents(ctx *gin.Context) {
+func (h AnalysisHandler) IssueIncidents(ctx *gin.Context) {
 	issueId := ctx.Param(ID)
 	// Filter
 	filter, err := qf.New(ctx,
@@ -766,6 +921,29 @@ func (h AnalysisHandler) Incidents(ctx *gin.Context) {
 	h.Respond(ctx, http.StatusOK, resources)
 }
 
+// Incident godoc
+// @summary Get an incident.
+// @description Get an incident.
+// @tags incidents
+// @produce json
+// @success 200 {object} api.Incident
+// @router /analyses/incidents/{id} [get]
+// @param id path int true "Incident ID"
+func (h AnalysisHandler) Incident(ctx *gin.Context) {
+	id := h.pk(ctx)
+	m := &model.Incident{}
+	db := h.DB(ctx)
+	err := db.First(m, id).Error
+	if err != nil {
+		_ = ctx.Error(err)
+		return
+	}
+	r := Incident{}
+	r.With(m)
+
+	h.Respond(ctx, http.StatusOK, r)
+}
+
 // RuleReports godoc
 // @summary List rule reports.
 // @description Each report collates issues by ruleset/rule.
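For the new single-incident endpoint above, a minimal client-side sketch; the base URL, the incident ID, and the `id` response field are assumptions, while the other field names come from the Incident resource in this patch:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// Incident mirrors a subset of api.Incident's JSON shape.
type Incident struct {
	ID      uint   `json:"id"`
	Issue   uint   `json:"issue"`
	File    string `json:"file"`
	Line    int    `json:"line"`
	Message string `json:"message"`
}

func main() {
	// GET /analyses/incidents/{id}; ID 7 is illustrative.
	resp, err := http.Get("http://hub.local/analyses/incidents/7")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	r := Incident{}
	if err := json.NewDecoder(resp.Body).Decode(&r); err != nil {
		panic(err)
	}
	fmt.Printf("incident %d in %s:%d\n", r.ID, r.File, r.Line)
}
```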
@@ -867,6 +1045,7 @@ func (h AnalysisHandler) RuleReports(ctx *gin.Context) { _ = ctx.Error(err) return } + // Render for i := range list { m := list[i] @@ -1331,7 +1510,81 @@ func (h AnalysisHandler) Deps(ctx *gin.Context) { // Find db := h.DB(ctx) db = db.Model(&model.TechDependency{}) - db = db.Where("AnalysisID IN (?)", h.analysisIDs(ctx, filter)) + db = db.Where("AnalysisID IN (?)", h.analysesIDs(ctx, filter)) + db = db.Where("ID IN (?)", h.depIDs(ctx, filter)) + db = sort.Sorted(db) + var list []model.TechDependency + var m model.TechDependency + page := Page{} + page.With(ctx) + cursor := Cursor{} + cursor.With(db, page) + defer func() { + cursor.Close() + }() + for cursor.Next(&m) { + if cursor.Error != nil { + _ = ctx.Error(cursor.Error) + return + } + list = append(list, m) + } + err = h.WithCount(ctx, cursor.Count()) + if err != nil { + _ = ctx.Error(err) + return + } + + // Render + for i := range list { + r := TechDependency{} + r.With(&list[i]) + resources = append(resources, r) + } + + h.Respond(ctx, http.StatusOK, resources) +} + +// AnalysisDeps godoc +// @summary List analysis dependencies. +// @description List analysis dependencies. +// @description filters: +// @description - name +// @description - version +// @description - sha +// @description - indirect +// @description - labels +// @tags dependencies +// @produce json +// @success 200 {object} []api.TechDependency +// @router /analyses/{id}/dependencies [get] +// @param id path int true "Analysis ID" +func (h AnalysisHandler) AnalysisDeps(ctx *gin.Context) { + resources := []TechDependency{} + // Filter + filter, err := qf.New(ctx, + []qf.Assert{ + {Field: "name", Kind: qf.STRING}, + {Field: "version", Kind: qf.STRING}, + {Field: "sha", Kind: qf.STRING}, + {Field: "indirect", Kind: qf.STRING}, + {Field: "labels", Kind: qf.STRING, And: true}, + }) + if err != nil { + _ = ctx.Error(err) + return + } + // Sort + sort := Sort{} + err = sort.With(ctx, &model.TechDependency{}) + if err != nil { + _ = ctx.Error(err) + return + } + // Find + db := h.DB(ctx) + db = db.Model(&model.TechDependency{}) + db = db.Where("AnalysisID = ?", h.pk(ctx)) db = db.Where("ID IN (?)", h.depIDs(ctx, filter)) db = sort.Sorted(db) var list []model.TechDependency @@ -1683,6 +1936,16 @@ func (h *AnalysisHandler) appIDs(ctx *gin.Context, f qf.Filter) (q *gorm.DB) { } // analysisIDs provides analysis IDs. +func (h *AnalysisHandler) analysesIDs(ctx *gin.Context, f qf.Filter) (q *gorm.DB) { + q = h.DB(ctx) + q = q.Model(&model.Analysis{}) + q = q.Select("ID") + q = q.Where("ApplicationID IN (?)", h.appIDs(ctx, f)) + q = q.Group("ApplicationID") + return +} + +// analysisIDs provides LATEST analysis IDs. func (h *AnalysisHandler) analysisIDs(ctx *gin.Context, f qf.Filter) (q *gorm.DB) { q = h.DB(ctx) q = q.Model(&model.Analysis{}) @@ -1764,18 +2027,41 @@ func (h *AnalysisHandler) depIDs(ctx *gin.Context, f qf.Filter) (q *gorm.DB) { return } +// archiveById +// - Set the 'archived' flag. +// - Set the 'summary' field with archived issues. +// - Delete issues. +// - Delete dependencies. +func (h *AnalysisHandler) archiveById(ctx *gin.Context) (err error) { + id := h.pk(ctx) + db := h.DB(ctx) + db = db.Where("id", id) + err = h.archive(ctx, db) + return +} + +// archiveByApp +// - Set the 'archived' flag. +// - Set the 'summary' field with archived issues. +// - Delete issues. +// - Delete dependencies. 
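+//
+// Invoked by AppCreate when the archiver is enabled:
+//
+//	if Settings.Analysis.ArchiverEnabled {
+//		err := h.archiveByApp(ctx)
+//		if err != nil {
+//			_ = ctx.Error(err)
+//			return
+//		}
+//	}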
+func (h *AnalysisHandler) archiveByApp(ctx *gin.Context) (err error) { + id := h.pk(ctx) + db := h.DB(ctx) + db = db.Where("ApplicationID", id) + err = h.archive(ctx, db) + return +} + // archive // - Set the 'archived' flag. // - Set the 'summary' field with archived issues. // - Delete issues. // - Delete dependencies. -func (h *AnalysisHandler) archive(ctx *gin.Context) (err error) { - appId := h.pk(ctx) +func (h *AnalysisHandler) archive(ctx *gin.Context, q *gorm.DB) (err error) { var unarchived []model.Analysis - db := h.DB(ctx) - db = db.Where("ApplicationID", appId) - db = db.Where("Archived", false) - err = db.Find(&unarchived).Error + q = q.Where("Archived", false) + err = q.Find(&unarchived).Error if err != nil { return } @@ -1829,6 +2115,7 @@ func (h *AnalysisHandler) archive(ctx *gin.Context) (err error) { type Analysis struct { Resource `yaml:",inline"` Effort int `json:"effort"` + Commit string `json:"commit,omitempty" yaml:",omitempty"` Archived bool `json:"archived,omitempty" yaml:",omitempty"` Issues []Issue `json:"issues,omitempty" yaml:",omitempty"` Dependencies []TechDependency `json:"dependencies,omitempty" yaml:",omitempty"` @@ -1839,52 +2126,23 @@ type Analysis struct { func (r *Analysis) With(m *model.Analysis) { r.Resource.With(&m.Model) r.Effort = m.Effort + r.Commit = m.Commit r.Archived = m.Archived - r.Issues = []Issue{} - for i := range m.Issues { - n := Issue{} - n.With(&m.Issues[i]) - r.Issues = append( - r.Issues, - n) - } - r.Dependencies = []TechDependency{} - for i := range m.Dependencies { - n := TechDependency{} - n.With(&m.Dependencies[i]) - r.Dependencies = append( - r.Dependencies, - n) - } - if m.Summary != nil { - _ = json.Unmarshal(m.Summary, &r.Summary) - } } // Model builds a model. func (r *Analysis) Model() (m *model.Analysis) { m = &model.Analysis{} m.Effort = r.Effort + m.Commit = r.Commit m.Issues = []model.Issue{} - for i := range r.Issues { - n := r.Issues[i].Model() - m.Issues = append( - m.Issues, - *n) - } - m.Dependencies = []model.TechDependency{} - for i := range r.Dependencies { - n := r.Dependencies[i].Model() - m.Dependencies = append( - m.Dependencies, - *n) - } return } // Issue REST resource. type Issue struct { Resource `yaml:",inline"` + Analysis uint `json:"analysis"` RuleSet string `json:"ruleset" binding:"required"` Rule string `json:"rule" binding:"required"` Name string `json:"name" binding:"required"` @@ -1900,6 +2158,7 @@ type Issue struct { // With updates the resource with the model. func (r *Issue) With(m *model.Issue) { r.Resource.With(&m.Model) + r.Analysis = m.AnalysisID r.RuleSet = m.RuleSet r.Rule = m.Rule r.Name = m.Name @@ -1950,6 +2209,7 @@ func (r *Issue) Model() (m *model.Issue) { // TechDependency REST resource. type TechDependency struct { Resource `yaml:",inline"` + Analysis uint `json:"analysis"` Provider string `json:"provider" yaml:",omitempty"` Name string `json:"name" binding:"required"` Version string `json:"version,omitempty" yaml:",omitempty"` @@ -1961,6 +2221,7 @@ type TechDependency struct { // With updates the resource with the model. func (r *TechDependency) With(m *model.TechDependency) { r.Resource.With(&m.Model) + r.Analysis = m.AnalysisID r.Provider = m.Provider r.Name = m.Name r.Version = m.Version @@ -1987,6 +2248,7 @@ func (r *TechDependency) Model() (m *model.TechDependency) { // Incident REST resource. 
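+//
+// Rendered form (values invented, field set abridged; "id" is assumed to
+// come from the embedded Resource):
+//
+//	{"id": 7, "issue": 3, "file": "src/Main.java", "line": 12, "message": "..."}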
 type Incident struct {
 	Resource `yaml:",inline"`
+	Issue    uint   `json:"issue"`
 	File     string `json:"file"`
 	Line     int    `json:"line"`
 	Message  string `json:"message"`
@@ -1997,6 +2259,7 @@ type Incident struct {
 // With updates the resource with the model.
 func (r *Incident) With(m *model.Incident) {
 	r.Resource.With(&m.Model)
+	r.Issue = m.IssueID
 	r.File = m.File
 	r.Line = m.Line
 	r.Message = m.Message
@@ -2107,6 +2370,105 @@ type DepAppReport struct {
 // FactMap map.
 type FactMap map[string]interface{}
 
+// IssueWriter used to create a file containing issues.
+type IssueWriter struct {
+	encoder
+	ctx *gin.Context
+}
+
+// Create writes an issues file and returns its path.
+func (r *IssueWriter) Create(id uint, filter qf.Filter) (path string, count int64, err error) {
+	ext := ".json"
+	accepted := r.ctx.NegotiateFormat(BindMIMEs...)
+	switch accepted {
+	case "",
+		binding.MIMEPOSTForm,
+		binding.MIMEJSON:
+	case binding.MIMEYAML:
+		ext = ".yaml"
+	default:
+		err = &BadRequestError{"MIME not supported."}
+		return
+	}
+	file, err := os.CreateTemp("", "issue-*"+ext)
+	if err != nil {
+		return
+	}
+	defer func() {
+		_ = file.Close()
+	}()
+	path = file.Name()
+	count, err = r.Write(id, filter, file)
+	return
+}
+
+// db returns a db client.
+func (r *IssueWriter) db() (db *gorm.DB) {
+	rtx := WithContext(r.ctx)
+	db = rtx.DB
+	return
+}
+
+// Write the issues file.
+func (r *IssueWriter) Write(id uint, filter qf.Filter, output io.Writer) (count int64, err error) {
+	r.encoder, err = r.newEncoder(output)
+	if err != nil {
+		return
+	}
+	page := Page{}
+	page.With(r.ctx)
+	sort := Sort{}
+	err = sort.With(r.ctx, &model.Issue{})
+	if err != nil {
+		return
+	}
+	r.beginList()
+	batch := 10
+	for b := page.Offset; ; b += batch {
+		db := r.db()
+		if id > 0 {
+			db = db.Where("AnalysisID", id)
+		}
+		db = filter.Where(db)
+		db = db.Preload("Incidents")
+		db = db.Limit(batch)
+		db = db.Offset(b)
+		db = sort.Sorted(db)
+		var issues []model.Issue
+		err = db.Find(&issues).Error
+		if err != nil {
+			return
+		}
+		if len(issues) == 0 {
+			break
+		}
+		for i := range issues {
+			issue := Issue{}
+			issue.With(&issues[i])
+			r.writeItem(b, i, issue)
+			count++
+		}
+	}
+	r.endList()
+	return
+}
+
+// newEncoder returns an encoder.
+func (r *IssueWriter) newEncoder(output io.Writer) (encoder encoder, err error) {
+	accepted := r.ctx.NegotiateFormat(BindMIMEs...)
+	switch accepted {
+	case "",
+		binding.MIMEPOSTForm,
+		binding.MIMEJSON:
+		encoder = &jsonEncoder{output: output}
+	case binding.MIMEYAML:
+		encoder = &yamlEncoder{output: output}
+	default:
+		err = &BadRequestError{"MIME not supported."}
+	}
+
+	return
+}
+
 // AnalysisWriter used to create a file containing an analysis.
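+//
+// Handler-side usage of these writers follows one pattern; for
+// IssueWriter (error handling elided):
+//
+//	writer := IssueWriter{ctx: ctx}
+//	path, count, err := writer.Create(id, filter)
+//	defer func() { _ = os.Remove(path) }()
+//	_ = h.WithCount(ctx, count)
+//	h.Status(ctx, http.StatusOK)
+//	ctx.File(path)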
 type AnalysisWriter struct {
 	encoder
diff --git a/hack/add/analysis.sh b/hack/add/analysis.sh
index 4fb1b30f..bb2cb79e 100755
--- a/hack/add/analysis.sh
+++ b/hack/add/analysis.sh
@@ -197,6 +197,7 @@ version: 8
 #
 file=${aPath}
 echo -n "---
+commit: 42b22a90
 issues:
 dependencies:
 " > ${file}
diff --git a/migration/pkg.go b/migration/pkg.go
index 0887c5f2..eab172e6 100644
--- a/migration/pkg.go
+++ b/migration/pkg.go
@@ -6,6 +6,7 @@ import (
 	v11 "github.com/konveyor/tackle2-hub/migration/v11"
 	v12 "github.com/konveyor/tackle2-hub/migration/v12"
 	v13 "github.com/konveyor/tackle2-hub/migration/v13"
+	v14 "github.com/konveyor/tackle2-hub/migration/v14"
 	v2 "github.com/konveyor/tackle2-hub/migration/v2"
 	v3 "github.com/konveyor/tackle2-hub/migration/v3"
 	v4 "github.com/konveyor/tackle2-hub/migration/v4"
@@ -54,5 +55,6 @@ func All() []Migration {
 		v11.Migration{},
 		v12.Migration{},
 		v13.Migration{},
+		v14.Migration{},
 	}
 }
diff --git a/migration/v14/migrate.go b/migration/v14/migrate.go
new file mode 100644
index 00000000..45df7860
--- /dev/null
+++ b/migration/v14/migrate.go
@@ -0,0 +1,20 @@
+package v14
+
+import (
+	"github.com/jortel/go-utils/logr"
+	"github.com/konveyor/tackle2-hub/migration/v14/model"
+	"gorm.io/gorm"
+)
+
+var log = logr.WithName("migration|v14")
+
+type Migration struct{}
+
+func (r Migration) Apply(db *gorm.DB) (err error) {
+	err = db.AutoMigrate(r.Models()...)
+	return
+}
+
+func (r Migration) Models() []interface{} {
+	return model.All()
+}
diff --git a/migration/v14/model/analysis.go b/migration/v14/model/analysis.go
new file mode 100644
index 00000000..4b61e1f4
--- /dev/null
+++ b/migration/v14/model/analysis.go
@@ -0,0 +1,157 @@
+package model
+
+import "gorm.io/gorm"
+
+// Analysis report.
+type Analysis struct {
+	Model
+	Effort        int
+	Commit        string
+	Archived      bool
+	Summary       JSON             `gorm:"type:json"`
+	Issues        []Issue          `gorm:"constraint:OnDelete:CASCADE"`
+	Dependencies  []TechDependency `gorm:"constraint:OnDelete:CASCADE"`
+	ApplicationID uint             `gorm:"index;not null"`
+	Application   *Application
+}
+
+// TechDependency report dependency.
+type TechDependency struct {
+	Model
+	Provider   string `gorm:"uniqueIndex:depA"`
+	Name       string `gorm:"uniqueIndex:depA"`
+	Version    string `gorm:"uniqueIndex:depA"`
+	SHA        string `gorm:"uniqueIndex:depA"`
+	Indirect   bool
+	Labels     JSON `gorm:"type:json"`
+	AnalysisID uint `gorm:"index;uniqueIndex:depA;not null"`
+	Analysis   *Analysis
+}
+
+// Issue report issue (violation).
+type Issue struct {
+	Model
+	RuleSet     string `gorm:"uniqueIndex:issueA;not null"`
+	Rule        string `gorm:"uniqueIndex:issueA;not null"`
+	Name        string `gorm:"index"`
+	Description string
+	Category    string     `gorm:"index;not null"`
+	Incidents   []Incident `gorm:"foreignKey:IssueID;constraint:OnDelete:CASCADE"`
+	Links       JSON       `gorm:"type:json"`
+	Facts       JSON       `gorm:"type:json"`
+	Labels      JSON       `gorm:"type:json"`
+	Effort      int        `gorm:"index;not null"`
+	AnalysisID  uint       `gorm:"index;uniqueIndex:issueA;not null"`
+	Analysis    *Analysis
+}
+
+// Incident report an issue incident.
+type Incident struct {
+	Model
+	File     string `gorm:"index;not null"`
+	Line     int
+	Message  string
+	CodeSnip string
+	Facts    JSON `gorm:"type:json"`
+	IssueID  uint `gorm:"index;not null"`
+	Issue    *Issue
+}
+
+// Link URL link.
+type Link struct {
+	URL   string `json:"url"`
+	Title string `json:"title,omitempty"`
+}
+
+// ArchivedIssue resource created when issues are archived.
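+// One entry is produced per archived issue, for example (values invented):
+//
+//	{"ruleSet": "konveyor", "rule": "rule-001", "category": "mandatory",
+//	 "effort": 3, "incidents": 12}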
+type ArchivedIssue struct { + RuleSet string `json:"ruleSet"` + Rule string `json:"rule"` + Name string `json:"name,omitempty" yaml:",omitempty"` + Description string `json:"description,omitempty" yaml:",omitempty"` + Category string `json:"category"` + Effort int `json:"effort"` + Incidents int `json:"incidents"` +} + +// RuleSet - Analysis ruleset. +type RuleSet struct { + Model + UUID *string `gorm:"uniqueIndex"` + Kind string + Name string `gorm:"uniqueIndex;not null"` + Description string + Repository JSON `gorm:"type:json"` + IdentityID *uint `gorm:"index"` + Identity *Identity + Rules []Rule `gorm:"constraint:OnDelete:CASCADE"` + DependsOn []RuleSet `gorm:"many2many:RuleSetDependencies;constraint:OnDelete:CASCADE"` +} + +func (r *RuleSet) Builtin() bool { + return r.UUID != nil +} + +// BeforeUpdate hook to avoid cyclic dependencies. +func (r *RuleSet) BeforeUpdate(db *gorm.DB) (err error) { + seen := make(map[uint]bool) + var nextDeps []RuleSet + var nextRuleSetIDs []uint + for _, dep := range r.DependsOn { + nextRuleSetIDs = append(nextRuleSetIDs, dep.ID) + } + for len(nextRuleSetIDs) != 0 { + result := db.Preload("DependsOn").Where("ID IN ?", nextRuleSetIDs).Find(&nextDeps) + if result.Error != nil { + err = result.Error + return + } + nextRuleSetIDs = nextRuleSetIDs[:0] + for _, nextDep := range nextDeps { + for _, dep := range nextDep.DependsOn { + if seen[dep.ID] { + continue + } + if dep.ID == r.ID { + err = DependencyCyclicError{} + return + } + seen[dep.ID] = true + nextRuleSetIDs = append(nextRuleSetIDs, dep.ID) + } + } + } + + return +} + +// Rule - Analysis rule. +type Rule struct { + Model + Name string + Description string + Labels JSON `gorm:"type:json"` + RuleSetID uint `gorm:"uniqueIndex:RuleA;not null"` + RuleSet *RuleSet + FileID *uint `gorm:"uniqueIndex:RuleA" ref:"file"` + File *File +} + +// Target - analysis rule selector. 
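+//
+// Builtin (seeded) targets carry a UUID; user-created targets do not:
+//
+//	t := &Target{Name: "custom"} // name invented
+//	t.Builtin()                  // false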
+type Target struct { + Model + UUID *string `gorm:"uniqueIndex"` + Name string `gorm:"uniqueIndex;not null"` + Description string + Provider string + Choice bool + Labels JSON `gorm:"type:json"` + ImageID uint `gorm:"index" ref:"file"` + Image *File + RuleSetID *uint `gorm:"index"` + RuleSet *RuleSet +} + +func (r *Target) Builtin() bool { + return r.UUID != nil +} diff --git a/migration/v14/model/application.go b/migration/v14/model/application.go new file mode 100644 index 00000000..0531fb58 --- /dev/null +++ b/migration/v14/model/application.go @@ -0,0 +1,298 @@ +package model + +import ( + "fmt" + "sync" + "time" + + "gorm.io/gorm" +) + +type Application struct { + Model + BucketOwner + Name string `gorm:"index;unique;not null"` + Description string + Review *Review `gorm:"constraint:OnDelete:CASCADE"` + Repository JSON `gorm:"type:json"` + Binary string + Facts []Fact `gorm:"constraint:OnDelete:CASCADE"` + Comments string + Tasks []Task `gorm:"constraint:OnDelete:CASCADE"` + Tags []Tag `gorm:"many2many:ApplicationTags"` + Identities []Identity `gorm:"many2many:ApplicationIdentity;constraint:OnDelete:CASCADE"` + BusinessServiceID *uint `gorm:"index"` + BusinessService *BusinessService + OwnerID *uint `gorm:"index"` + Owner *Stakeholder `gorm:"foreignKey:OwnerID"` + Contributors []Stakeholder `gorm:"many2many:ApplicationContributors;constraint:OnDelete:CASCADE"` + Analyses []Analysis `gorm:"constraint:OnDelete:CASCADE"` + MigrationWaveID *uint `gorm:"index"` + MigrationWave *MigrationWave + Ticket *Ticket `gorm:"constraint:OnDelete:CASCADE"` + Assessments []Assessment `gorm:"constraint:OnDelete:CASCADE"` +} + +type Fact struct { + ApplicationID uint `gorm:"<-:create;primaryKey"` + Key string `gorm:"<-:create;primaryKey"` + Source string `gorm:"<-:create;primaryKey;not null"` + Value JSON `gorm:"type:json;not null"` + Application *Application +} + +// ApplicationTag represents a row in the join table for the +// many-to-many relationship between Applications and Tags. +type ApplicationTag struct { + ApplicationID uint `gorm:"primaryKey"` + TagID uint `gorm:"primaryKey"` + Source string `gorm:"primaryKey;not null"` + Application Application `gorm:"constraint:OnDelete:CASCADE"` + Tag Tag `gorm:"constraint:OnDelete:CASCADE"` +} + +// TableName must return "ApplicationTags" to ensure compatibility +// with the autogenerated join table name. +func (ApplicationTag) TableName() string { + return "ApplicationTags" +} + +// depMutex ensures Dependency.Create() is not executed concurrently. +var depMutex sync.Mutex + +type Dependency struct { + Model + ToID uint `gorm:"index"` + To *Application `gorm:"foreignKey:ToID;constraint:OnDelete:CASCADE"` + FromID uint `gorm:"index"` + From *Application `gorm:"foreignKey:FromID;constraint:OnDelete:CASCADE"` +} + +// Create a dependency synchronized using a mutex. +func (r *Dependency) Create(db *gorm.DB) (err error) { + depMutex.Lock() + defer depMutex.Unlock() + err = db.Create(r).Error + return +} + +// BeforeCreate detects cyclic dependencies. 
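+//
+// Example: given an existing A -> B dependency, creating B -> A fails:
+//
+//	d := &Dependency{FromID: b.ID, ToID: a.ID} // a, b: persisted applications
+//	err := d.Create(db)                        // returns DependencyCyclicError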
+func (r *Dependency) BeforeCreate(db *gorm.DB) (err error) { + var nextDeps []*Dependency + var nextAppsIDs []uint + nextAppsIDs = append(nextAppsIDs, r.FromID) + for len(nextAppsIDs) != 0 { + db.Where("ToID IN ?", nextAppsIDs).Find(&nextDeps) + nextAppsIDs = nextAppsIDs[:0] // empty array, but keep capacity + for _, nextDep := range nextDeps { + if nextDep.FromID == r.ToID { + err = DependencyCyclicError{} + return + } + nextAppsIDs = append(nextAppsIDs, nextDep.FromID) + } + } + + return +} + +// DependencyCyclicError reports cyclic Dependency error. +type DependencyCyclicError struct{} + +func (e DependencyCyclicError) Error() string { + return "Cyclic dependencies are not permitted." +} + +type BusinessService struct { + Model + Name string `gorm:"index;unique;not null"` + Description string + Applications []Application `gorm:"constraint:OnDelete:SET NULL"` + StakeholderID *uint `gorm:"index"` + Stakeholder *Stakeholder +} + +type JobFunction struct { + Model + UUID *string `gorm:"uniqueIndex"` + Username string + Name string `gorm:"index;unique;not null"` + Stakeholders []Stakeholder `gorm:"constraint:OnDelete:SET NULL"` +} + +type Stakeholder struct { + Model + Name string `gorm:"not null;"` + Email string `gorm:"index;unique;not null"` + Groups []StakeholderGroup `gorm:"many2many:StakeholderGroupStakeholder;constraint:OnDelete:CASCADE"` + BusinessServices []BusinessService `gorm:"constraint:OnDelete:SET NULL"` + JobFunctionID *uint `gorm:"index"` + JobFunction *JobFunction + Owns []Application `gorm:"foreignKey:OwnerID;constraint:OnDelete:SET NULL"` + Contributes []Application `gorm:"many2many:ApplicationContributors;constraint:OnDelete:CASCADE"` + MigrationWaves []MigrationWave `gorm:"many2many:MigrationWaveStakeholders;constraint:OnDelete:CASCADE"` + Assessments []Assessment `gorm:"many2many:AssessmentStakeholders;constraint:OnDelete:CASCADE"` + Archetypes []Archetype `gorm:"many2many:ArchetypeStakeholders;constraint:OnDelete:CASCADE"` +} + +type StakeholderGroup struct { + Model + Name string `gorm:"index;unique;not null"` + Username string + Description string + Stakeholders []Stakeholder `gorm:"many2many:StakeholderGroupStakeholder;constraint:OnDelete:CASCADE"` + MigrationWaves []MigrationWave `gorm:"many2many:MigrationWaveStakeholderGroups;constraint:OnDelete:CASCADE"` + Assessments []Assessment `gorm:"many2many:AssessmentStakeholderGroups;constraint:OnDelete:CASCADE"` + Archetypes []Archetype `gorm:"many2many:ArchetypeStakeholderGroups;constraint:OnDelete:CASCADE"` +} + +type MigrationWave struct { + Model + Name string `gorm:"uniqueIndex:MigrationWaveA"` + StartDate time.Time `gorm:"uniqueIndex:MigrationWaveA"` + EndDate time.Time `gorm:"uniqueIndex:MigrationWaveA"` + Applications []Application `gorm:"constraint:OnDelete:SET NULL"` + Stakeholders []Stakeholder `gorm:"many2many:MigrationWaveStakeholders;constraint:OnDelete:CASCADE"` + StakeholderGroups []StakeholderGroup `gorm:"many2many:MigrationWaveStakeholderGroups;constraint:OnDelete:CASCADE"` +} + +type Archetype struct { + Model + Name string + Description string + Comments string + Review *Review `gorm:"constraint:OnDelete:CASCADE"` + Assessments []Assessment `gorm:"constraint:OnDelete:CASCADE"` + CriteriaTags []Tag `gorm:"many2many:ArchetypeCriteriaTags;constraint:OnDelete:CASCADE"` + Tags []Tag `gorm:"many2many:ArchetypeTags;constraint:OnDelete:CASCADE"` + Stakeholders []Stakeholder `gorm:"many2many:ArchetypeStakeholders;constraint:OnDelete:CASCADE"` + StakeholderGroups []StakeholderGroup 
`gorm:"many2many:ArchetypeStakeholderGroups;constraint:OnDelete:CASCADE"` +} + +type Tag struct { + Model + UUID *string `gorm:"uniqueIndex"` + Name string `gorm:"uniqueIndex:tagA;not null"` + Username string + CategoryID uint `gorm:"uniqueIndex:tagA;index;not null"` + Category TagCategory +} + +type TagCategory struct { + Model + UUID *string `gorm:"uniqueIndex"` + Name string `gorm:"index;unique;not null"` + Username string + Rank uint + Color string + Tags []Tag `gorm:"foreignKey:CategoryID;constraint:OnDelete:CASCADE"` +} + +type Ticket struct { + Model + // Kind of ticket in the external tracker. + Kind string `gorm:"not null"` + // Parent resource that this ticket should belong to in the tracker. (e.g. Jira project) + Parent string `gorm:"not null"` + // Custom fields to send to the tracker when creating the ticket + Fields JSON `gorm:"type:json"` + // Whether the last attempt to do something with the ticket reported an error + Error bool + // Error message, if any + Message string + // Whether the ticket was created in the external tracker + Created bool + // Reference id in external tracker + Reference string + // URL to ticket in external tracker + Link string + // Status of ticket in external tracker + Status string + LastUpdated time.Time + Application *Application + ApplicationID uint `gorm:"uniqueIndex:ticketA;not null"` + Tracker *Tracker + TrackerID uint `gorm:"uniqueIndex:ticketA;not null"` +} + +type Tracker struct { + Model + Name string `gorm:"index;unique;not null"` + URL string + Kind string + Identity *Identity + IdentityID uint + Connected bool + LastUpdated time.Time + Message string + Insecure bool + Tickets []Ticket +} + +type Import struct { + Model + Filename string + ApplicationName string + BusinessService string + Comments string + Dependency string + DependencyDirection string + Description string + ErrorMessage string + IsValid bool + RecordType1 string + ImportSummary ImportSummary + ImportSummaryID uint `gorm:"index"` + Processed bool + ImportTags []ImportTag `gorm:"constraint:OnDelete:CASCADE"` + BinaryGroup string + BinaryArtifact string + BinaryVersion string + BinaryPackaging string + RepositoryKind string + RepositoryURL string + RepositoryBranch string + RepositoryPath string + Owner string + Contributors string +} + +func (r *Import) AsMap() (m map[string]interface{}) { + m = make(map[string]interface{}) + m["filename"] = r.Filename + m["applicationName"] = r.ApplicationName + // "Application Name" is necessary in order for + // the UI to display the error report correctly. 
+ m["Application Name"] = r.ApplicationName + m["businessService"] = r.BusinessService + m["comments"] = r.Comments + m["dependency"] = r.Dependency + m["dependencyDirection"] = r.DependencyDirection + m["description"] = r.Description + m["errorMessage"] = r.ErrorMessage + m["isValid"] = r.IsValid + m["processed"] = r.Processed + m["recordType1"] = r.RecordType1 + for i, tag := range r.ImportTags { + m[fmt.Sprintf("category%v", i+1)] = tag.Category + m[fmt.Sprintf("tag%v", i+1)] = tag.Name + } + return +} + +type ImportSummary struct { + Model + Content []byte + Filename string + ImportStatus string + Imports []Import `gorm:"constraint:OnDelete:CASCADE"` + CreateEntities bool +} + +type ImportTag struct { + Model + Name string + Category string + ImportID uint `gorm:"index"` + Import *Import +} diff --git a/migration/v14/model/assessment.go b/migration/v14/model/assessment.go new file mode 100644 index 00000000..3a734e86 --- /dev/null +++ b/migration/v14/model/assessment.go @@ -0,0 +1,46 @@ +package model + +type Questionnaire struct { + Model + UUID *string `gorm:"uniqueIndex"` + Name string `gorm:"unique"` + Description string + Required bool + Sections JSON `gorm:"type:json"` + Thresholds JSON `gorm:"type:json"` + RiskMessages JSON `gorm:"type:json"` + Assessments []Assessment `gorm:"constraint:OnDelete:CASCADE"` +} + +// Builtin returns true if this is a Konveyor-provided questionnaire. +func (r *Questionnaire) Builtin() bool { + return r.UUID != nil +} + +type Assessment struct { + Model + ApplicationID *uint `gorm:"uniqueIndex:AssessmentA"` + Application *Application + ArchetypeID *uint `gorm:"uniqueIndex:AssessmentB"` + Archetype *Archetype + QuestionnaireID uint `gorm:"uniqueIndex:AssessmentA;uniqueIndex:AssessmentB"` + Questionnaire Questionnaire + Sections JSON `gorm:"type:json"` + Thresholds JSON `gorm:"type:json"` + RiskMessages JSON `gorm:"type:json"` + Stakeholders []Stakeholder `gorm:"many2many:AssessmentStakeholders;constraint:OnDelete:CASCADE"` + StakeholderGroups []StakeholderGroup `gorm:"many2many:AssessmentStakeholderGroups;constraint:OnDelete:CASCADE"` +} + +type Review struct { + Model + BusinessCriticality uint `gorm:"not null"` + EffortEstimate string `gorm:"not null"` + ProposedAction string `gorm:"not null"` + WorkPriority uint `gorm:"not null"` + Comments string + ApplicationID *uint `gorm:"uniqueIndex"` + Application *Application + ArchetypeID *uint `gorm:"uniqueIndex"` + Archetype *Archetype +} diff --git a/migration/v14/model/core.go b/migration/v14/model/core.go new file mode 100644 index 00000000..828545aa --- /dev/null +++ b/migration/v14/model/core.go @@ -0,0 +1,380 @@ +package model + +import ( + "encoding/json" + "os" + "path" + "time" + + "github.com/google/uuid" + liberr "github.com/jortel/go-utils/error" + "github.com/konveyor/tackle2-hub/encryption" + "gorm.io/gorm" +) + +// Model Base model. +type Model struct { + ID uint `gorm:"<-:create;primaryKey"` + CreateTime time.Time `gorm:"<-:create;autoCreateTime"` + CreateUser string `gorm:"<-:create"` + UpdateUser string +} + +type Setting struct { + Model + Key string `gorm:"<-:create;uniqueIndex"` + Value JSON `gorm:"type:json"` +} + +// With updates the value of the Setting with the json representation +// of the `value` parameter. +func (r *Setting) With(value interface{}) (err error) { + r.Value, err = json.Marshal(value) + if err != nil { + err = liberr.Wrap(err) + } + return +} + +// As unmarshalls the value of the Setting into the `ptr` parameter. 
+func (r *Setting) As(ptr interface{}) (err error) { + err = json.Unmarshal(r.Value, ptr) + if err != nil { + err = liberr.Wrap(err) + } + return +} + +type Bucket struct { + Model + Path string `gorm:"<-:create;uniqueIndex"` + Expiration *time.Time +} + +func (m *Bucket) BeforeCreate(db *gorm.DB) (err error) { + if m.Path == "" { + uid := uuid.New() + m.Path = path.Join( + Settings.Hub.Bucket.Path, + uid.String()) + err = os.MkdirAll(m.Path, 0777) + if err != nil { + err = liberr.Wrap( + err, + "path", + m.Path) + } + } + return +} + +type BucketOwner struct { + BucketID *uint `gorm:"index" ref:"bucket"` + Bucket *Bucket +} + +func (m *BucketOwner) BeforeCreate(db *gorm.DB) (err error) { + if !m.HasBucket() { + b := &Bucket{} + err = db.Create(b).Error + m.SetBucket(&b.ID) + } + return +} + +func (m *BucketOwner) SetBucket(id *uint) { + m.BucketID = id + m.Bucket = nil +} + +func (m *BucketOwner) HasBucket() (b bool) { + return m.BucketID != nil +} + +type File struct { + Model + Name string + Path string `gorm:"<-:create;uniqueIndex"` + Expiration *time.Time +} + +func (m *File) BeforeCreate(db *gorm.DB) (err error) { + uid := uuid.New() + m.Path = path.Join( + Settings.Hub.Bucket.Path, + ".file", + uid.String()) + err = os.MkdirAll(path.Dir(m.Path), 0777) + if err != nil { + err = liberr.Wrap( + err, + "path", + m.Path) + } + return +} + +type Task struct { + Model + BucketOwner + Name string `gorm:"index"` + Kind string + Addon string `gorm:"index"` + Extensions []string `gorm:"type:json;serializer:json"` + State string `gorm:"index"` + Locator string `gorm:"index"` + Priority int + Policy TaskPolicy `gorm:"type:json;serializer:json"` + TTL TTL `gorm:"type:json;serializer:json"` + Data Data `gorm:"type:json;serializer:json"` + Started *time.Time + Terminated *time.Time + Errors []TaskError `gorm:"type:json;serializer:json"` + Events []TaskEvent `gorm:"type:json;serializer:json"` + Pod string `gorm:"index"` + Retries int + Attached []Attachment `gorm:"type:json;serializer:json" ref:"[]file"` + Report *TaskReport `gorm:"constraint:OnDelete:CASCADE"` + ApplicationID *uint + Application *Application + TaskGroupID *uint `gorm:"<-:create"` + TaskGroup *TaskGroup +} + +func (m *Task) BeforeCreate(db *gorm.DB) (err error) { + err = m.BucketOwner.BeforeCreate(db) + return +} + +// TaskEvent task event. +type TaskEvent struct { + Kind string `json:"kind"` + Count int `json:"count"` + Reason string `json:"reason,omitempty" yaml:",omitempty"` + Last time.Time `json:"last"` +} + +// Map alias. +type Map = map[string]any + +// Any alias. +type Any any + +// Data json any field. +type Data struct { + Any +} + +// TTL time-to-live. +type TTL struct { + Created int `json:"created,omitempty" yaml:",omitempty"` + Pending int `json:"pending,omitempty" yaml:",omitempty"` + Running int `json:"running,omitempty" yaml:",omitempty"` + Succeeded int `json:"succeeded,omitempty" yaml:",omitempty"` + Failed int `json:"failed,omitempty" yaml:",omitempty"` +} + +// Ref represents a FK. +type Ref struct { + ID uint `json:"id" binding:"required"` + Name string `json:"name,omitempty" yaml:",omitempty"` +} + +// TaskError used in Task.Errors. +type TaskError struct { + Severity string `json:"severity"` + Description string `json:"description"` +} + +// TaskPolicy scheduling policy. 
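+//
+// Serialized form (values invented):
+//
+//	{"isolated": true, "preemptEnabled": true}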
+type TaskPolicy struct { + Isolated bool `json:"isolated,omitempty" yaml:",omitempty"` + PreemptEnabled bool `json:"preemptEnabled,omitempty" yaml:"preemptEnabled,omitempty"` + PreemptExempt bool `json:"preemptExempt,omitempty" yaml:"preemptExempt,omitempty"` +} + +// Attachment file attachment. +type Attachment struct { + ID uint `json:"id" binding:"required"` + Name string `json:"name,omitempty" yaml:",omitempty"` + Activity int `json:"activity,omitempty" yaml:",omitempty"` +} + +type TaskReport struct { + Model + Status string + Total int + Completed int + Activity []string `gorm:"type:json;serializer:json"` + Errors []TaskError `gorm:"type:json;serializer:json"` + Attached []Attachment `gorm:"type:json;serializer:json" ref:"[]file"` + Result Data `gorm:"type:json;serializer:json"` + TaskID uint `gorm:"<-:create;uniqueIndex"` + Task *Task +} + +type TaskGroup struct { + Model + BucketOwner + Name string + Kind string + Addon string + Extensions []string `gorm:"type:json;serializer:json"` + State string + Priority int + Policy TaskPolicy `gorm:"type:json;serializer:json"` + Data Data `gorm:"type:json;serializer:json"` + List []Task `gorm:"type:json;serializer:json"` + Tasks []Task `gorm:"constraint:OnDelete:CASCADE"` +} + +// Propagate group data into the task. +func (m *TaskGroup) Propagate() (err error) { + for i := range m.Tasks { + task := &m.Tasks[i] + task.Kind = m.Kind + task.Addon = m.Addon + task.Extensions = m.Extensions + task.Priority = m.Priority + task.Policy = m.Policy + task.State = m.State + task.SetBucket(m.BucketID) + if m.Data.Any != nil { + mA, castA := m.Data.Any.(map[string]any) + mB, castB := task.Data.Any.(map[string]any) + if castA && castB { + task.Data.Any = m.merge(mA, mB) + } else { + task.Data.Any = m.Data + } + } + } + + return +} + +// merge maps B into A. +// The B map is the authority. +func (m *TaskGroup) merge(a, b Map) (out Map) { + if a == nil { + a = Map{} + } + if b == nil { + b = Map{} + } + out = Map{} + // + // Merge-in elements found in B and in A. + for k, v := range a { + out[k] = v + if bv, found := b[k]; found { + out[k] = bv + if av, cast := v.(Map); cast { + if bv, cast := bv.(Map); cast { + out[k] = m.merge(av, bv) + } else { + out[k] = bv + } + } + } + } + // + // Add elements found only in B. + for k, v := range b { + if _, found := a[k]; !found { + out[k] = v + } + } + + return +} + +// Proxy configuration. +// kind = (http|https) +type Proxy struct { + Model + Enabled bool + Kind string `gorm:"uniqueIndex"` + Host string `gorm:"not null"` + Port int + Excluded JSON `gorm:"type:json"` + IdentityID *uint `gorm:"index"` + Identity *Identity +} + +// Identity represents and identity with a set of credentials. +type Identity struct { + Model + Kind string `gorm:"not null"` + Name string `gorm:"index;unique;not null"` + Description string + User string + Password string + Key string + Settings string + Proxies []Proxy `gorm:"constraint:OnDelete:SET NULL"` +} + +// Encrypt sensitive fields. +// The ref identity is used to determine when sensitive fields +// have changed and need to be (re)encrypted. 
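+//
+// Update sketch: pass the stored record so unchanged secrets are not
+// re-encrypted:
+//
+//	stored := &Identity{} // the current, already-encrypted record
+//	// updated holds the incoming (plaintext) changes.
+//	err := updated.Encrypt(stored)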
+func (r *Identity) Encrypt(ref *Identity) (err error) { + passphrase := Settings.Encryption.Passphrase + aes := encryption.New(passphrase) + if r.Password != ref.Password { + if r.Password != "" { + r.Password, err = aes.Encrypt(r.Password) + if err != nil { + err = liberr.Wrap(err) + return + } + } + } + if r.Key != ref.Key { + if r.Key != "" { + r.Key, err = aes.Encrypt(r.Key) + if err != nil { + err = liberr.Wrap(err) + return + } + } + } + if r.Settings != ref.Settings { + if r.Settings != "" { + r.Settings, err = aes.Encrypt(r.Settings) + if err != nil { + err = liberr.Wrap(err) + return + } + } + } + return +} + +// Decrypt sensitive fields. +func (r *Identity) Decrypt() (err error) { + passphrase := Settings.Encryption.Passphrase + aes := encryption.New(passphrase) + if r.Password != "" { + r.Password, err = aes.Decrypt(r.Password) + if err != nil { + err = liberr.Wrap(err) + return + } + } + if r.Key != "" { + r.Key, err = aes.Decrypt(r.Key) + if err != nil { + err = liberr.Wrap(err) + return + } + } + if r.Settings != "" { + r.Settings, err = aes.Decrypt(r.Settings) + if err != nil { + err = liberr.Wrap(err) + return + } + } + return +} diff --git a/migration/v14/model/pkg.go b/migration/v14/model/pkg.go new file mode 100644 index 00000000..d9615512 --- /dev/null +++ b/migration/v14/model/pkg.go @@ -0,0 +1,55 @@ +package model + +import ( + "github.com/konveyor/tackle2-hub/settings" +) + +var ( + Settings = &settings.Settings +) + +// JSON field (data) type. +type JSON = []byte + +// All builds all models. +// Models are enumerated such that each are listed after +// all the other models on which they may depend. +func All() []interface{} { + return []interface{}{ + Application{}, + TechDependency{}, + Incident{}, + Analysis{}, + Issue{}, + Bucket{}, + BusinessService{}, + Dependency{}, + File{}, + Fact{}, + Identity{}, + Import{}, + ImportSummary{}, + ImportTag{}, + JobFunction{}, + MigrationWave{}, + Proxy{}, + Review{}, + Setting{}, + RuleSet{}, + Rule{}, + Stakeholder{}, + StakeholderGroup{}, + Tag{}, + TagCategory{}, + Target{}, + Task{}, + TaskGroup{}, + TaskReport{}, + Ticket{}, + Tracker{}, + ApplicationTag{}, + Questionnaire{}, + Assessment{}, + Archetype{}, + } +} diff --git a/model/pkg.go b/model/pkg.go index 073a8bbc..6171a917 100644 --- a/model/pkg.go +++ b/model/pkg.go @@ -1,7 +1,7 @@ package model import ( - "github.com/konveyor/tackle2-hub/migration/v13/model" + "github.com/konveyor/tackle2-hub/migration/v14/model" ) // Field (data) types. 
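The one-line import bump above is the whole schema switch: the top-level model package re-exports the newest migration's models as type aliases. A sketch of that pattern (the alias list here is an illustrative subset, not the file's full contents):

```go
package model

import (
	"github.com/konveyor/tackle2-hub/migration/v14/model"
)

// Field (data) types.
type JSON = model.JSON

// Current-schema model aliases (illustrative subset).
type (
	Analysis       = model.Analysis
	Issue          = model.Issue
	Incident       = model.Incident
	TechDependency = model.TechDependency
)
```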
diff --git a/settings/hub.go b/settings/hub.go index 08daf258..38937214 100644 --- a/settings/hub.go +++ b/settings/hub.go @@ -7,32 +7,33 @@ import ( ) const ( - EnvNamespace = "NAMESPACE" - EnvDbPath = "DB_PATH" - EnvDbSeedPath = "DB_SEED_PATH" - EnvBucketPath = "BUCKET_PATH" - EnvRwxSupported = "RWX_SUPPORTED" - EnvCachePath = "CACHE_PATH" - EnvCachePvc = "CACHE_PVC" - EnvSharedPath = "SHARED_PATH" - EnvPassphrase = "ENCRYPTION_PASSPHRASE" - EnvTaskReapCreated = "TASK_REAP_CREATED" - EnvTaskReapSucceeded = "TASK_REAP_SUCCEEDED" - EnvTaskReapFailed = "TASK_REAP_FAILED" - EnvTaskSA = "TASK_SA" - EnvTaskRetries = "TASK_RETRIES" - EnvTaskPreemptEnabled = "TASK_PREEMPT_ENABLED" - EnvTaskPreemptDelayed = "TASK_PREEMPT_DELAYED" - EnvTaskPreemptPostponed = "TASK_PREEMPT_POSTPONED" - EnvTaskPreemptRate = "TASK_PREEMPT_RATE" - EnvFrequencyTask = "FREQUENCY_TASK" - EnvFrequencyReaper = "FREQUENCY_REAPER" - EnvDevelopment = "DEVELOPMENT" - EnvBucketTTL = "BUCKET_TTL" - EnvFileTTL = "FILE_TTL" - EnvAppName = "APP_NAME" - EnvDisconnected = "DISCONNECTED" - EnvAnalysisReportPath = "ANALYSIS_REPORT_PATH" + EnvNamespace = "NAMESPACE" + EnvDbPath = "DB_PATH" + EnvDbSeedPath = "DB_SEED_PATH" + EnvBucketPath = "BUCKET_PATH" + EnvRwxSupported = "RWX_SUPPORTED" + EnvCachePath = "CACHE_PATH" + EnvCachePvc = "CACHE_PVC" + EnvSharedPath = "SHARED_PATH" + EnvPassphrase = "ENCRYPTION_PASSPHRASE" + EnvTaskReapCreated = "TASK_REAP_CREATED" + EnvTaskReapSucceeded = "TASK_REAP_SUCCEEDED" + EnvTaskReapFailed = "TASK_REAP_FAILED" + EnvTaskSA = "TASK_SA" + EnvTaskRetries = "TASK_RETRIES" + EnvTaskPreemptEnabled = "TASK_PREEMPT_ENABLED" + EnvTaskPreemptDelayed = "TASK_PREEMPT_DELAYED" + EnvTaskPreemptPostponed = "TASK_PREEMPT_POSTPONED" + EnvTaskPreemptRate = "TASK_PREEMPT_RATE" + EnvFrequencyTask = "FREQUENCY_TASK" + EnvFrequencyReaper = "FREQUENCY_REAPER" + EnvDevelopment = "DEVELOPMENT" + EnvBucketTTL = "BUCKET_TTL" + EnvFileTTL = "FILE_TTL" + EnvAppName = "APP_NAME" + EnvDisconnected = "DISCONNECTED" + EnvAnalysisReportPath = "ANALYSIS_REPORT_PATH" + EnvAnalysisArchiverEnabled = "ANALYSIS_ARCHIVER_ENABLED" ) type Hub struct { @@ -96,7 +97,8 @@ type Hub struct { Disconnected bool // Analysis settings. Analysis struct { - ReportPath string + ReportPath string + ArchiverEnabled bool } } @@ -249,7 +251,13 @@ func (r *Hub) Load() (err error) { if !found { r.Analysis.ReportPath = "/tmp/analysis/report" } - + s, found = os.LookupEnv(EnvAnalysisArchiverEnabled) + if found { + b, _ := strconv.ParseBool(s) + r.Analysis.ArchiverEnabled = b + } else { + r.Analysis.ArchiverEnabled = true + } return }
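The archiver defaults to enabled; a standalone sketch of the Load logic above (the env var name is from this patch, the printed line is just illustration):

```go
package main

import (
	"fmt"
	"os"
	"strconv"
)

func main() {
	// Mirrors Hub.Load: enabled unless ANALYSIS_ARCHIVER_ENABLED parses false.
	enabled := true
	if s, found := os.LookupEnv("ANALYSIS_ARCHIVER_ENABLED"); found {
		// As in Load, a parse error leaves the flag false.
		enabled, _ = strconv.ParseBool(s)
	}
	fmt.Println("analysis archiver enabled:", enabled)
}
```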