diec and refactoring
This commit is contained in:
parent a62157e8e5
commit 0d715ccb37
20 changed files with 355 additions and 200 deletions
@@ -36,14 +36,14 @@ func setDefaults() {
    viper.SetDefault("web.cert", "/etc/ssl/certs/ssl-cert-snakeoil.pem")
    viper.SetDefault("web.key", "/etc/ssl/key/ssl-cert-snakeoil.key")
    viper.SetDefault("web.loghttp", true)
    viper.SetDefault("web.maxfilesizemb", 100)
    viper.SetDefault("db.host", "localhost")
    viper.SetDefault("db.port", 5432)
    viper.SetDefault("db.user", "scanfile")
    viper.SetDefault("db.database", "scanfile")
    viper.SetDefault("db.password", "CHANGEME")
    viper.SetDefault("db.debug", false)
    viper.SetDefault("store.path", "./storage/files/")
}

func SaveConfig() error {
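Editor's sketch: the hunk above only registers fallback values. As a quick illustration of how a deployment could override them, here is a minimal sketch using viper's config-file and environment lookup; the search paths and the SCANFILE_ env prefix are assumptions for the example, not something this commit defines.

```go
package config

import (
	"strings"

	"github.com/spf13/viper"
)

// loadOverrides is a hypothetical helper layering a config file and environment
// variables (e.g. SCANFILE_DB_PASSWORD) on top of the defaults set above.
func loadOverrides() error {
	viper.SetConfigName("config")        // e.g. /etc/scanfile/config.yaml (assumed)
	viper.AddConfigPath("/etc/scanfile") // search paths are illustrative
	viper.AddConfigPath(".")

	viper.SetEnvPrefix("scanfile") // SCANFILE_DB_HOST overrides db.host, etc.
	viper.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
	viper.AutomaticEnv()

	if err := viper.ReadInConfig(); err != nil {
		// A missing config file is fine; defaults and env vars still apply.
		if _, notFound := err.(viper.ConfigFileNotFoundError); !notFound {
			return err
		}
	}
	return nil
}
```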
@@ -13,37 +13,36 @@ import (

// CreateFile() creates the filesystem object and the DB entry for a file
func CreateFile(ctx context.Context, name string, fileBytes []byte) (sqlc.File, error) {
    queries := sqlc.New(pool)
    file := sqlc.File{
        Name: name,
    }
    var err error

    bl2hash := blake2b.Sum256(fileBytes)
    file.Blake2 = bl2hash[:]
    file.Size = int64(len(fileBytes))

    file.Mimetype, _ = store.GetBytesFileType(fileBytes[:262])

    file, err = queries.CreateFile(ctx, sqlc.CreateFileParams{})
    if err == pgx.ErrNoRows {
        slog.Info("File already exists", "file-uuid", file.ID.String())
        return file, nil
    }
    if err != nil {
        slog.Error("Error saving file to database", "error", err, "file-name", name)
        err = nil
    }

    //Using UUIDs instead of the file hash to make switching storage backends easier
    _, err = store.SaveFile(file.ID.String(), fileBytes)
    if err != nil {
        slog.Error("Error saving file to disk", "error", err, "file-uuid", file.ID.String())
        errdel := queries.DeleteFile(ctx, file.ID)
        if errdel != nil {
            slog.Error("Error deleting file from database", "error", errdel, "file-uuid", file.ID.String())
        }
        return file, err
    }

@@ -11,7 +11,6 @@ import (
    "github.com/spf13/viper"
)

-
var pool *pgxpool.Pool

func Connect() (*pgxpool.Pool, error) {
@@ -24,24 +23,24 @@ func Connect() (*pgxpool.Pool, error) {
        Host: fmt.Sprintf("%s:%d", viper.GetString("db.host"), viper.GetInt("db.port")),
        Path: fmt.Sprintf("/%s", viper.GetString("db.database")),
    }
    conn, err := pgxpool.New(context.Background(), dburl.String())
    if err != nil {
        slog.Error("Could not connect to DB", "url", dburl.Redacted(), "error", err)
        return nil, err
    }

    pool = conn

    return conn, nil
}

func Ping() error {
    err := pool.Ping(context.Background())
    if err != nil {
        slog.Error("Error connecting to DB", "error", err)
        return err
    } else {
        slog.Info("DB: Pong")
        return nil
    }
}
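Editor's sketch: Connect() and Ping() both report failures through their return values. A minimal sketch of a fail-fast startup that checks them, purely for illustration; main() in this commit calls them without checking, and the database import path is assumed.

```go
package main

import (
	"log/slog"
	"os"

	"git.jmbit.de/jmb/scanfile/server/internal/database" // import path assumed
)

// mustConnectDB aborts the process if the pool cannot be created or the DB
// does not answer a ping.
func mustConnectDB() {
	if _, err := database.Connect(); err != nil {
		slog.Error("could not open database pool", "error", err)
		os.Exit(1)
	}
	if err := database.Ping(); err != nil {
		slog.Error("database did not answer ping", "error", err)
		os.Exit(1)
	}
}
```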
@@ -9,22 +9,39 @@ import (
)

func GetFileByID(fileID string) (sqlc.File, error) {
    var pgUUID pgtype.UUID
    err := pgUUID.Scan(fileID)
    if err != nil {
        slog.Error("Unable to convert string to UUID", "file-uuid", fileID, "error", err)
    }
    query := sqlc.New(pool)
    file, err := query.GetFileByUUID(context.Background(), pgUUID)

    return file, nil
}

func InsertFileProperties(properties sqlc.InsertFilePropertiesParams) error {
    query := sqlc.New(pool)
    err := query.InsertFileProperties(context.Background(), properties)
    if err != nil {
        slog.Error("Unable to add file properties", "file-uuid", properties.ID.String(), "error", err)
    }
    return err
+}
+
+// InsertJsonResult() into one of the following tables:
+// diec, msoffice_mraptor, msoffice_oleid, msoffice_olevba
+func InsertJsonResult(fileID pgtype.UUID, data []byte, table string) error {
+    query := sqlc.New(pool)
+    var err error
+    switch table {
+    case "diec":
+        err = query.InsertFileDIEC(context.Background(), sqlc.InsertFileDIECParams{FileID: fileID, Data: data})
+    case "msoffice_oleid":
+
+    }
+    if err != nil {
+        slog.Error("Unable to insert DIEC results", "file-uuid", fileID.String(), "error", err)
+    }
+    return err
}
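Editor's sketch: only the "diec" case of InsertJsonResult() is wired up so far. As a hedged sketch, the remaining cases could call the sqlc methods generated later in this same commit; the function name, the default branch, and the sqlc import path below are illustrative, not part of the change.

```go
package database

import (
	"context"
	"fmt"

	"git.jmbit.de/jmb/scanfile/server/internal/database/sqlc" // import path assumed
	"github.com/jackc/pgx/v5/pgtype"
)

// insertJSONResultFull is a hypothetical, fully wired variant of InsertJsonResult().
func insertJSONResultFull(ctx context.Context, fileID pgtype.UUID, data []byte, table string) error {
	query := sqlc.New(pool)
	switch table {
	case "diec":
		return query.InsertFileDIEC(ctx, sqlc.InsertFileDIECParams{FileID: fileID, Data: data})
	case "msoffice_oleid":
		return query.InsertFileMsofficeOleid(ctx, sqlc.InsertFileMsofficeOleidParams{FileID: fileID, Data: data})
	case "msoffice_olevba":
		return query.InsertFileMsofficeOlevba(ctx, sqlc.InsertFileMsofficeOlevbaParams{FileID: fileID, Data: data})
	case "msoffice_mraptor":
		return query.InsertFileMsofficeMraptor(ctx, sqlc.InsertFileMsofficeMraptorParams{FileID: fileID, Data: data})
	default:
		return fmt.Errorf("unknown result table %q", table)
	}
}
```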
@@ -10,63 +10,63 @@ import (

// NewProcessingJob() Creates a new Processing Job in the Database
func NewProcessingJob(ctx context.Context, fileid pgtype.UUID, jobType string) (sqlc.ProcessingJob, error) {
    job := sqlc.ProcessingJob{}
    job.FileID = fileid
    query := sqlc.New(pool)
    job, err := query.CreateProcessingJob(ctx, fileid)

    if err != nil {
        slog.Error("Unable to create new processing job", "file-uuid", fileid.String())
        return job, err
    }

    return job, nil
}

// StartProcessingJob() starts the job
func StartProcessingJob(jobid int64) error {
    query := sqlc.New(pool)
    err := query.StartProcessingJob(context.Background(), jobid)
    if err != nil {
        slog.Error("Unable to start processing job", "job-id", jobid)
    }
    return err
}

// FinishProcessingJob() marks the job as completed
func FinishProcessingJob(jobid int64) error {
    query := sqlc.New(pool)
    err := query.FinishProcessingJob(context.Background(), jobid)
    if err != nil {
        slog.Error("Unable to finish processing job", "job-id", jobid)
    }
    return err
}

// FailProcessingJob() marks the job as completed
func FailProcessingJob(jobid int64, jobErr error) error {
    slog.Error("Job failed", "job-id", jobid, "error", jobErr)
    query := sqlc.New(pool)
    var params sqlc.FailProcessingJobParams
    params.ID = jobid
    params.Error.String = jobErr.Error()
    err := query.FailProcessingJob(context.Background(), params)
    if err != nil {
        slog.Error("Unable to mark processing job as failed", "job-id", jobid, "error", err)
    }
    return err
}

func AddProcessingJobMessage(jobid int64, message string) error {
    _, err := pool.Exec(context.Background(),
        `
    UPDATE processing_jobs
    SET messages = messages || $2::JSONB
    WHERE id = $1;
    `,
        jobid, message)
    if err != nil {
        slog.Error("Unable to finish processing job", "job-id", jobid)
    }
    return err
}
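Editor's sketch: AddProcessingJobMessage() appends its argument to a JSONB column via `messages || $2::JSONB`, so callers presumably have to pass valid JSON. A small hedged sketch of a call site; the helper name and the import path are made up for the example.

```go
package processing

import (
	"encoding/json"
	"log/slog"

	"git.jmbit.de/jmb/scanfile/server/internal/database" // import path assumed
)

// attachJobNote marshals a note into a single-element JSON array before handing
// it to AddProcessingJobMessage, so the JSONB concatenation has valid input.
func attachJobNote(jobid int64, note string) {
	msg, err := json.Marshal([]string{note})
	if err != nil {
		return
	}
	if err := database.AddProcessingJobMessage(jobid, string(msg)); err != nil {
		slog.Warn("could not attach job message", "job-id", jobid, "error", err)
	}
}
```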
@@ -3,4 +3,27 @@ INSERT INTO file_properties (
  id, sha256, md5, libmagic_mime, libmagic_extension, libmagic_apple
) VALUES ($1, $2, $3, $4, $5, $6);

+-- name: GetFileProperties :one
+SELECT * FROM file_properties
+WHERE id = $1;
+
+-- name: InsertFileDIEC :exec
+INSERT INTO diec (
+  file_id, data
+) VALUES ($1, $2);
+
+-- name: InsertFileMsofficeOleid :exec
+INSERT INTO msoffice_oleid (
+  file_id, data
+) VALUES ($1, $2);
+
+-- name: InsertFileMsofficeOlevba :exec
+INSERT INTO msoffice_olevba (
+  file_id, data
+) VALUES ($1, $2);
+
+-- name: InsertFileMsofficeMraptor :exec
+INSERT INTO msoffice_mraptor (
+  file_id, data
+) VALUES ($1, $2);
@@ -10,31 +10,34 @@ import (
    "git.jmbit.de/jmb/scanfile/server/internal/store"
)

-//BasicProcessing() determines type agnostic information about the file
+// BasicProcessing() determines type agnostic information about the file
func BasicProcessing(job sqlc.ProcessingJob) error {
    fileBytes, err := store.GetFileBytes(job.FileID.String())
    if err != nil {
        database.FailProcessingJob(job.ID, err)
        return err
    }
    sha256sum := sha256.Sum256(fileBytes)
    md5sum := md5.Sum(fileBytes)
    fileCmdResult, err := FileCmd(job.FileID.String())
    if err != nil {
+        database.FailProcessingJob(job.ID, err)
        slog.Error("Error processing file", "file-uuid", job.FileID.String(), "error", err)
        return err
    }

    fileProperties := sqlc.InsertFilePropertiesParams{}
    fileProperties.ID.Bytes = job.FileID.Bytes
    fileProperties.Md5 = md5sum[:]
    fileProperties.Sha256 = sha256sum[:]
    fileProperties.LibmagicMime.String = fileCmdResult.MimeType
    fileProperties.LibmagicApple.String = fileCmdResult.Apple
    fileProperties.LibmagicExtension.String = fileCmdResult.Extension
-    database.InsertFileProperties(fileProperties)
+    err = database.InsertFileProperties(fileProperties)
+    if err != nil {
+        slog.Error("Error inserting basic file properties into database", "file-uuid", job.FileID.String(), "error", err)
+        return err
+    }

    return nil
}
server/internal/processing/basic/diec.go (new file, 25 additions)
@@ -0,0 +1,25 @@
+package basic
+
+import (
+    "log/slog"
+    "os/exec"
+
+    "git.jmbit.de/jmb/scanfile/server/internal/store"
+)
+
+// DiecScan() runs diec -jdu on the file
+func DiecScan(fileName string) ([]byte, error) {
+    var by []byte
+    filepath, err := store.AbsPath(fileName)
+    if err != nil {
+        slog.Error("Error in DiecScan", "file-uuid", fileName, "error", err)
+        return by, err
+    }
+    cmd := exec.Command("/usr/bin/diec", "-jdu", filepath)
+    result, err := cmd.Output()
+    if err != nil {
+        slog.Error("Error in DiecScan", "file-uuid", fileName, "error", err)
+        return by, err
+    }
+    return result, nil
+}
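Editor's sketch: a hedged sketch of how the new scanner could be combined with the InsertJsonResult() helper added in the same commit; the wrapper function and the import paths are illustrative only.

```go
package basic

import (
	"log/slog"

	"git.jmbit.de/jmb/scanfile/server/internal/database"      // import path assumed
	"git.jmbit.de/jmb/scanfile/server/internal/database/sqlc" // import path assumed
)

// diecStep is a hypothetical processing step: run diec on the stored file
// (addressed by its UUID string, like the other store helpers) and persist the
// JSON output into the "diec" results table.
func diecStep(job sqlc.ProcessingJob) error {
	data, err := DiecScan(job.FileID.String())
	if err != nil {
		return err
	}
	if err := database.InsertJsonResult(job.FileID, data, "diec"); err != nil {
		slog.Error("Could not store diec results", "file-uuid", job.FileID.String(), "error", err)
		return err
	}
	return nil
}
```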
@@ -8,7 +8,6 @@ import (
    "git.jmbit.de/jmb/scanfile/server/internal/store"
)

-
type FileCmdResult struct {
    Type string
    MimeType string
@@ -16,38 +15,39 @@ type FileCmdResult struct {
    Extension string
}

-//FileCmd() runs "/usr/bin/file" on the object. Should be replaced with libmagic bindings instead
+// FileCmd() runs "/usr/bin/file" on the object. Should be replaced with libmagic bindings instead
func FileCmd(fileName string) (FileCmdResult, error) {
    var returnStruct FileCmdResult
    filepath, err := store.AbsPath(fileName)
    if err != nil {
+        slog.Error("Error in FileCmd", "file-uuid", fileName, "error", err)
        return returnStruct, err
    }
    cmd := exec.Command("/usr/bin/file", "-b", filepath)
    result, err := cmd.Output()
    if err != nil {
        slog.Error("Error running file command", "file-uuid", fileName, "error", err)
        return returnStruct, err
    }
    returnStruct.Type = strings.TrimRight(string(result), "\n ")
    cmd = exec.Command("/usr/bin/file", "-b", "--mime-type", filepath)
    result, err = cmd.Output()
    if err != nil {
        slog.Error("Error running file (mime-type) command", "file-uuid", fileName, "error", err)
        return returnStruct, err
    }
    returnStruct.MimeType = strings.TrimRight(string(result), "\n ")
    cmd = exec.Command("/usr/bin/file", "-b", "--apple", filepath)
    result, err = cmd.Output()
    if err != nil {
        slog.Error("Error running file (apple) command", "file-uuid", fileName, "error", err)
        return returnStruct, err
    }
    returnStruct.Apple = strings.TrimRight(string(result), "\n ")
    cmd = exec.Command("/usr/bin/file", "-b", "--extension", filepath)
    result, err = cmd.Output()
    if err != nil {
        slog.Error("Error running file (extension) command", "file-uuid", fileName, "error", err)
        return returnStruct, err
    }
    returnStruct.Extension = strings.TrimRight(string(result), "\n ")
@@ -14,27 +14,27 @@ import (

var semaphore chan struct{}
var swg *sync.WaitGroup

-//Used to determine if a task was started by a previous instance that stalled out or died
+// Used to determine if a task was started by a previous instance that stalled out or died
var startup time.Time

func Setup(wg *sync.WaitGroup) {
    semaphore = make(chan struct{}, viper.GetInt("processing.maxparallel"))
    startup = time.Now()
}

// Submit() starts the analysis process for a file.
-func Submit(ctx context.Context, file pgtype.UUID ) error {
+func Submit(ctx context.Context, file pgtype.UUID) error {
    job, err := database.NewProcessingJob(ctx, file, TypeBasic)
    if err != nil {
        slog.Error("Could not submit processing job", "error", err, "file-uuid", file)
        return err
    }
    go processJob(job)

    return nil
}


func processJob(job sqlc.ProcessingJob) {

}
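Editor's sketch: processJob() is left as an empty stub in this commit. Purely as a speculative sketch, it might one day combine the semaphore declared above with the job helpers and the basic analysis pass; none of this body is in the commit, and the import paths are assumed.

```go
package processing

import (
	"git.jmbit.de/jmb/scanfile/server/internal/database"         // import path assumed
	"git.jmbit.de/jmb/scanfile/server/internal/database/sqlc"    // import path assumed
	"git.jmbit.de/jmb/scanfile/server/internal/processing/basic" // import path assumed
)

// processJob (speculative fill-in): throttle via the package-level semaphore,
// then drive the job through its start/finish/fail states.
func processJob(job sqlc.ProcessingJob) {
	semaphore <- struct{}{}        // one slot per job, capacity processing.maxparallel
	defer func() { <-semaphore }() // release the slot when the job ends

	if err := database.StartProcessingJob(job.ID); err != nil {
		return
	}
	if err := basic.BasicProcessing(job); err != nil {
		database.FailProcessingJob(job.ID, err)
		return
	}
	database.FinishProcessingJob(job.ID)
}
```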
@@ -5,71 +5,76 @@ import (
    "strings"
)

const TypeBasic = "Basic"

// Microsoft Office Document
const TypeMSOffice = "MSOffice"

// Microsoft Windows Portable Executable
const TypePE = "PE"

// Linux/UNIX Executable Linkable Format
const TypeELF = "ELF"

// Java Archive (JAR)
const TypeJAR = "JAR"

// Archives (compressed etc.)
const TypeArchive = "Archive"

// Anything not implemented (yet)
const TypeOther = "Other"

var MSOfficeMime = []string{
    "application/msword",
    "application/vnd.ms-excel",
    "application/vnd.ms-powerpoint",
    "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
    "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
    "application/vnd.openxmlformats-officedocument.presentationml.presentation",
}

var ELFMime = []string{
    "application/x-executable",
}

var PEMime = []string{
    "application/vnd.microsoft.portable-executable",
}

var ArchiveMime = []string{
    "application/epub+zip",
    "application/zip",
    "application/x-tar",
    "application/vnd.rar",
    "application/gzip",
    "application/x-bzip2",
    "application/x-7z-compressed",
    "application/x-xz",
    "application/zstd",
    "application/x-iso9660-image",
    "application/x-google-chrome-extension",
    "application/vnd.ms-cab-compressed",
    "application/vnd.debian.binary-package",
    "application/x-unix-archive",
    "application/x-compress",
    "application/x-lzip",
    "application/x-rpm",
    "application/dicom",
}

func TypeFromMime(mimetype string) string {
    if strings.HasPrefix(mimetype, "application") {
        if slices.Contains(ELFMime, mimetype) {
            return TypeELF
        } else if slices.Contains(PEMime, mimetype) {
            return TypePE
        } else if slices.Contains(MSOfficeMime, mimetype) {
            return TypeMSOffice
        } else if slices.Contains(ArchiveMime, mimetype) {
            return TypeArchive
        }
    }

    return TypeOther
}
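Editor's sketch: TypeFromMime() collapses a MIME string into one of the Type* constants; a hedged sketch of the kind of dispatch that enables. Only the basic pass exists in this commit, so the per-type step names below are illustrative and mirror the result tables added elsewhere in the change.

```go
package processing

// analysisStepsFor is a hypothetical dispatcher built on TypeFromMime(); the
// step names echo this commit's result tables but are not wired up here.
func analysisStepsFor(mimetype string) []string {
	steps := []string{TypeBasic} // every file gets the type-agnostic pass
	switch TypeFromMime(mimetype) {
	case TypeMSOffice:
		steps = append(steps, "msoffice_oleid", "msoffice_olevba", "msoffice_mraptor")
	case TypePE, TypeELF:
		steps = append(steps, "diec")
	case TypeArchive:
		steps = append(steps, "unpack") // purely illustrative
	}
	return steps
}
```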
@@ -11,7 +11,7 @@ func RegisterRoutes() *http.ServeMux {
    mux.HandleFunc("/", web.IndexWebHandler)
    mux.HandleFunc("/about", web.AboutWebHandler)
    mux.HandleFunc("/files/{uuid}", web.FileViewWebHandler)
    mux.HandleFunc("POST /upload", web.IndexUploadHandler)
    mux.Handle("/assets/", http.FileServer(http.FS(web.Files)))
    return mux
}
@@ -11,6 +11,89 @@ import (
    "github.com/jackc/pgx/v5/pgtype"
)

+const getFileProperties = `-- name: GetFileProperties :one
+SELECT id, sha256, md5, libmagic_mime, libmagic_extension, libmagic_apple FROM file_properties
+WHERE id = $1
+`
+
+func (q *Queries) GetFileProperties(ctx context.Context, id pgtype.UUID) (FileProperty, error) {
+    row := q.db.QueryRow(ctx, getFileProperties, id)
+    var i FileProperty
+    err := row.Scan(
+        &i.ID,
+        &i.Sha256,
+        &i.Md5,
+        &i.LibmagicMime,
+        &i.LibmagicExtension,
+        &i.LibmagicApple,
+    )
+    return i, err
+}
+
+const insertFileDIEC = `-- name: InsertFileDIEC :exec
+INSERT INTO diec (
+  file_id, data
+) VALUES ($1, $2)
+`
+
+type InsertFileDIECParams struct {
+    FileID pgtype.UUID
+    Data   []byte
+}
+
+func (q *Queries) InsertFileDIEC(ctx context.Context, arg InsertFileDIECParams) error {
+    _, err := q.db.Exec(ctx, insertFileDIEC, arg.FileID, arg.Data)
+    return err
+}
+
+const insertFileMsofficeMraptor = `-- name: InsertFileMsofficeMraptor :exec
+INSERT INTO msoffice_mraptor (
+  file_id, data
+) VALUES ($1, $2)
+`
+
+type InsertFileMsofficeMraptorParams struct {
+    FileID pgtype.UUID
+    Data   []byte
+}
+
+func (q *Queries) InsertFileMsofficeMraptor(ctx context.Context, arg InsertFileMsofficeMraptorParams) error {
+    _, err := q.db.Exec(ctx, insertFileMsofficeMraptor, arg.FileID, arg.Data)
+    return err
+}
+
+const insertFileMsofficeOleid = `-- name: InsertFileMsofficeOleid :exec
+INSERT INTO msoffice_oleid (
+  file_id, data
+) VALUES ($1, $2)
+`
+
+type InsertFileMsofficeOleidParams struct {
+    FileID pgtype.UUID
+    Data   []byte
+}
+
+func (q *Queries) InsertFileMsofficeOleid(ctx context.Context, arg InsertFileMsofficeOleidParams) error {
+    _, err := q.db.Exec(ctx, insertFileMsofficeOleid, arg.FileID, arg.Data)
+    return err
+}
+
+const insertFileMsofficeOlevba = `-- name: InsertFileMsofficeOlevba :exec
+INSERT INTO msoffice_olevba (
+  file_id, data
+) VALUES ($1, $2)
+`
+
+type InsertFileMsofficeOlevbaParams struct {
+    FileID pgtype.UUID
+    Data   []byte
+}
+
+func (q *Queries) InsertFileMsofficeOlevba(ctx context.Context, arg InsertFileMsofficeOlevbaParams) error {
+    _, err := q.db.Exec(ctx, insertFileMsofficeOlevba, arg.FileID, arg.Data)
+    return err
+}
+
const insertFileProperties = `-- name: InsertFileProperties :exec
INSERT INTO file_properties (
  id, sha256, md5, libmagic_mime, libmagic_extension, libmagic_apple
@@ -33,7 +33,7 @@ func GetFileType(fileId string) (string, error) {
    return kind.MIME.Value, nil
}

-//Returns the MimeType for a []byte
+// Returns the MimeType for a []byte
// We only have to pass the file header = first 261 bytes
func GetBytesFileType(data []byte) (string, error) {
    kind, err := filetype.Match(data)
@@ -33,7 +33,7 @@ func SaveFile(fileName string, fileBytes []byte) (string, error) {
        slog.Error("could not create file on disk,", "error", err, "file-uuid", fileName)
        return "", err
    }
    defer osFile.Close()
    _, err = osFile.Write(fileBytes)
    if err != nil {
        slog.Error("could not write file content,", "error", err, "file-uuid", fileName)
@@ -74,15 +74,15 @@ func AbsPath(fileName string) (string, error) {
}

func GetFileBytes(fileName string) ([]byte, error) {
    var fileBytes []byte
    file, err := OpenFile(fileName)
    if err != nil {
        return fileBytes, err
    }
    _, err = file.Read(fileBytes)
    if err != nil {
        slog.Error("could not read file content,", "error", err, "file-uuid", fileName)
        return fileBytes, err
    }
    return fileBytes, nil
}
@@ -34,16 +34,16 @@ func main() {
    log.SetOutput(os.Stderr)
    slog.SetDefault(slog.New(slog.NewTextHandler(os.Stderr, nil)))
    config.ReadConfigFile("")
    var wg sync.WaitGroup
    database.Connect()
    database.Ping()
    store.SetupStore()
    processing.Setup(&wg)
    if viper.GetBool("web.tls") {
        slog.Info("Starting HTTPS server")
        server.NewServer().ListenAndServeTLS(viper.GetString("web.cert"), viper.GetString("web.key"))
    } else {
        slog.Info("Starting HTTP server")
        server.NewServer().ListenAndServe()
    }
}
@@ -8,7 +8,7 @@ import (
)

func FileViewWebHandler(w http.ResponseWriter, r *http.Request) {
    file, err := database.FileByUUID(r.PathValue("uuid"))
    if err != nil {
        slog.Error("Error getting File in FileViewWebHandler", "error", err, "file-uuid", r.PathValue("uuid"))
        http.Error(w, err.Error(), http.StatusBadRequest)
@@ -15,7 +15,7 @@ func IndexWebHandler(w http.ResponseWriter, r *http.Request) {
    if err != nil {
        slog.Error("Error rendering in IndexWebHandler", "error", err)
        http.Error(w, err.Error(), http.StatusBadRequest)
        return
    }
}
@@ -33,27 +33,27 @@ func IndexUploadHandler(w http.ResponseWriter, r *http.Request) {
    if err != nil {
        slog.Error("Error parsing form in IndexUploadHandler", "error", err)
        http.Error(w, err.Error(), http.StatusBadRequest)
        return
    }
    fileBytes, err := io.ReadAll(fileData)
    if err != nil {
        slog.Error("Error reading file in IndexUploadHandler", "error", err)
        http.Error(w, err.Error(), http.StatusBadRequest)
        return
    }

    file, err := database.CreateFile(r.Context(), fileHeader.Filename, fileBytes)
    if err != nil {
        slog.Error("Error saving file in IndexUploadHandler", "error", err)
        http.Error(w, err.Error(), http.StatusBadRequest)
        return
    }

    component := UploadSuccessCard(file)
    err = component.Render(r.Context(), w)
    if err != nil {
        slog.Error("Error rendering in IndexUploadHandler", "error", err)
        http.Error(w, err.Error(), http.StatusBadRequest)
        return
    }
}
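Editor's sketch: at least in the hunk shown here, the upload handler stores the file but never hands it to the processing package. A hedged fragment of how the two could be connected right after CreateFile() succeeds; the processing import, the placement, and the error status are assumptions.

```go
// Hypothetical continuation inside IndexUploadHandler, after database.CreateFile():
// submit the stored file for asynchronous analysis via processing.Submit().
if err := processing.Submit(r.Context(), file.ID); err != nil {
	slog.Error("Error submitting file for processing", "error", err, "file-uuid", file.ID.String())
	http.Error(w, err.Error(), http.StatusInternalServerError)
	return
}
```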
@@ -498,7 +498,7 @@ var internalSvgData = map[string]string{
<path d="M8 12a2 2 0 0 0 2-2V8H8" />
<path d="M14 12a2 2 0 0 0 2-2V8h-2" />`,
"navigation": `<polygon points="3 11 22 2 13 21 11 13 3 11" />`,
"minus": `<path d="M5 12h14" />`,
"list-check": `<path d="M11 18H3" />
<path d="m15 18 2 2 4-4" />
<path d="M16 12H3" />

@@ -2471,7 +2471,7 @@ var internalSvgData = map[string]string{
"toy-brick": `<rect width="18" height="12" x="3" y="8" rx="1" />
<path d="M10 8V5c0-.6-.4-1-1-1H6a1 1 0 0 0-1 1v3" />
<path d="M19 8V5c0-.6-.4-1-1-1h-3a1 1 0 0 0-1 1v3" />`,
"wrench": `<path d="M14.7 6.3a1 1 0 0 0 0 1.4l1.6 1.6a1 1 0 0 0 1.4 0l3.77-3.77a6 6 0 0 1-7.94 7.94l-6.91 6.91a2.12 2.12 0 0 1-3-3l6.91-6.91a6 6 0 0 1 7.94-7.94l-3.76 3.76z" />`,
"rectangle-horizontal": `<rect width="20" height="12" x="2" y="6" rx="2" />`,
"map-pin-check-inside": `<path d="M20 10c0 4.993-5.539 10.193-7.399 11.799a1 1 0 0 1-1.202 0C9.539 20.193 4 14.993 4 10a8 8 0 0 1 16 0" />
<path d="m9 10 2 2 4-4" />`,

@@ -4515,7 +4515,7 @@ var internalSvgData = map[string]string{
"ticket-plus": `<path d="M2 9a3 3 0 0 1 0 6v2a2 2 0 0 0 2 2h16a2 2 0 0 0 2-2v-2a3 3 0 0 1 0-6V7a2 2 0 0 0-2-2H4a2 2 0 0 0-2 2Z" />
<path d="M9 12h6" />
<path d="M12 9v6" />`,
"wifi-zero": `<path d="M12 20h.01" />`,
"type-outline": `<path d="M14 16.5a.5.5 0 0 0 .5.5h.5a2 2 0 0 1 0 4H9a2 2 0 0 1 0-4h.5a.5.5 0 0 0 .5-.5v-9a.5.5 0 0 0-.5-.5h-3a.5.5 0 0 0-.5.5V8a2 2 0 0 1-4 0V5a2 2 0 0 1 2-2h16a2 2 0 0 1 2 2v3a2 2 0 0 1-4 0v-.5a.5.5 0 0 0-.5-.5h-3a.5.5 0 0 0-.5.5Z" />`,
"circle-pause": `<circle cx="12" cy="12" r="10" />
<line x1="10" x2="10" y1="15" y2="9" />

@@ -4657,7 +4657,7 @@ var internalSvgData = map[string]string{
<path d="M4 18v3" />
<path d="M8 14v7" />`,
"chevron-down": `<path d="m6 9 6 6 6-6" />`,
"chevron-up": `<path d="m18 15-6-6-6 6" />`,
"dice-5": `<rect width="18" height="18" x="3" y="3" rx="2" ry="2" />
<path d="M16 8h.01" />
<path d="M8 8h.01" />

@@ -1,5 +1,6 @@
// templui component icon - version: main installed by templui v0.71.0
package icon

// This file is auto generated
// Using Lucide icons version 0.507.0
var ALargeSmall = Icon("a-large-small")