diff --git a/Justfile b/Justfile new file mode 100644 index 0000000..4e8f9a2 --- /dev/null +++ b/Justfile @@ -0,0 +1,21 @@ +export CGO_ENABLED := "0" + +# build locally +build: tidy + go build -gcflags "-l" -ldflags "-w -s" . + +# run golangci-lint +lint: + golangci-lint run + +# run go mod tidy +tidy: + go mod tidy + +# install to user path +install: tidy build + go install -gcflags "-l" -ldflags "-w -s" . + +# run go test +test: + go test ./... diff --git a/Makefile b/Makefile deleted file mode 100644 index 40f895b..0000000 --- a/Makefile +++ /dev/null @@ -1,17 +0,0 @@ -CGO_ENABLED=0 -export CGO_ENABLED - -.DEFAULT_GOAL := build -.PHONY: lint tidy build - -golangci-lint: - golangci-lint run - -tidy: - go mod tidy - -build: - go build -gcflags "-l" -ldflags "-w -s" -o pher main.go - -install: - go install -gcflags "-l" -ldflags "-w -s" . diff --git a/README.md b/README.md index 028f680..862ca19 100644 --- a/README.md +++ b/README.md @@ -13,16 +13,17 @@ there are a few differences: - Wikilinks are supported thanks to abhinav's [extension](https://github.com/abhinav/goldmark-wikilink). - No CSS framework. -- Comes as a small standalone binary (~9M). No need for a runtime. +- Comes as a small standalone binary (~9M). + No need for a runtime. - Some visual tweaks (personal preference). - The atom feed contains only dated entries. -- Flatter file structure (no "rooting" every page). Let webservers handle the - routing and beautifying. +- Flatter file structure (no "rooting" every page). + Let webservers handle the routing and beautifying. ## Installation ```bash -$ go install github.com/mstcl/pher/v2@v2.3.2 +$ go install github.com/mstcl/pher/v3@v3.0.0 ``` ## Usage @@ -66,8 +67,8 @@ footer: ## Frontmatter -pher reads in frontmatter in YAML format. Available fields and default values -are: +pher reads in frontmatter in YAML format. +Available fields and default values are: ```yaml --- @@ -82,6 +83,7 @@ toc: false # Render a table of contents for this entry showHeader: true # Show the header (title, description, tags, date) layout: "list" # Available values: "grid", "list", "log". Only effective for index.md files. --- + ``` ## To do @@ -112,9 +114,9 @@ layout: "list" # Available values: "grid", "list", "log". Only effective for ind ### Editing templates -pher embeds the templates in `web/templates` with go:embed. This means pher can -run as a standalone binary. Unfortunately, to modify the templates, we have to -recompile. +pher embeds the templates in `web/templates` with go:embed. +This means pher can run as a standalone binary. +Unfortunately, to modify the templates, we have to recompile. ### Removing html extension @@ -130,8 +132,9 @@ location / { } ``` -Additionally, setting `keepExtension: false` will strip ".html" from href -links. This might be necessary if you use weird browsers that break redirects. +Additionally, setting `keepExtension: +false` will strip ".html" from href links. +This might be necessary if you use weird browsers that break redirects. ## Credits diff --git a/go.mod b/go.mod index 56835b8..c22757f 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/mstcl/pher/v2 +module github.com/mstcl/pher/v3 go 1.22.2 diff --git a/internal/assetpath/assetpath.go b/internal/assetpath/assetpath.go new file mode 100644 index 0000000..97df8a5 --- /dev/null +++ b/internal/assetpath/assetpath.go @@ -0,0 +1,7 @@ +package assetpath + +type AssetPath string + +func (ap AssetPath) String() string { + return string(ap) +} diff --git a/internal/checks/checks.go b/internal/checks/checks.go deleted file mode 100644 index f1e41dc..0000000 --- a/internal/checks/checks.go +++ /dev/null @@ -1,57 +0,0 @@ -package checks - -import ( - "fmt" - "os" - - "github.com/mattn/go-zglob" -) - -// Check for markdown files under directory -func EntryPresent(f string) (bool, error) { - // we want to check all nested files - files, err := zglob.Glob(f + "/**/*.md") - if err != nil { - return false, err - } - - if len(files) == 0 { - return false, nil - } - - return true, nil -} - -// Return true/false if a path exists/doesn't exist -func FileExist(f string) (bool, error) { - if _, err := os.Stat(f); err != nil { - if os.IsNotExist(err) { - return false, nil - } else { - return false, err - } - } - - return true, nil -} - -// Ensure a directory exists -func DirExist(dir string) error { - if err := os.Mkdir(dir, 0o755); err == nil { - return nil - } else if os.IsExist(err) { - // check that the existing path is a directory - info, err := os.Stat(dir) - if err != nil { - return err - } - - if !info.IsDir() { - return fmt.Errorf("path exists but is not a directory") - } - - return nil - } - - return nil -} diff --git a/internal/cli/cli.go b/internal/cli/cli.go index ef7a3ca..032071e 100644 --- a/internal/cli/cli.go +++ b/internal/cli/cli.go @@ -1,267 +1,122 @@ +// Package cli [TODO] package cli import ( "context" - "embed" - "flag" "fmt" - "html/template" "log/slog" - "os" - "path/filepath" "time" - "github.com/lmittmann/tint" - "github.com/mattn/go-zglob" - "github.com/mstcl/pher/v2/internal/config" - "github.com/mstcl/pher/v2/internal/render" - "github.com/mstcl/pher/v2/internal/state" - "golang.org/x/sync/errgroup" - - "github.com/mstcl/pher/v2/internal/checks" - "github.com/mstcl/pher/v2/internal/feed" + "github.com/mstcl/pher/v3/internal/config" + "github.com/mstcl/pher/v3/internal/state" ) const ( - templateDir = "web/template" - version = "v2.3.2" -) - -var ( - Templates embed.FS - configFile, outDir, inDir string - dryRun bool - showVersion bool - debug bool + relTemplateDir = "web/template" + relStaticDir = "web/static" + relStaticOutputDir = "static" ) -func Parse() error { - start := time.Now() - +func Handler() error { var err error - flag.BoolVar( - &showVersion, - "v", - false, - "Show version and exit", - ) + start := time.Now() // start execution timer - flag.StringVar( - &configFile, - "c", - "config.yaml", - "Path to config file", - ) - flag.StringVar( - &inDir, - "i", - ".", - "Input directory", - ) - flag.StringVar( - &outDir, - "o", - "_site", - "Output directory", - ) - flag.BoolVar( - &dryRun, - "d", - false, - "Don't render (dry run)", - ) - flag.BoolVar( - &debug, - "debug", - false, - "Verbose (debug) mode", - ) - flag.Parse() + s := state.Init() // this is our lobal state - var lvl slog.Level + parseFlags(&s) // parse all our CLI flags here (onto the state) - if debug { - lvl = slog.LevelDebug - } else { - lvl = slog.LevelInfo - } + logger := createLogger(s.Debug) // create our "global" logger - logger := slog.New(tint.NewHandler(os.Stderr, &tint.Options{ - Level: lvl, - TimeFormat: time.Kitchen, - })) + initRuntimeInfo() // get global runtime info - logger.Debug("parsed flags", - slog.String("inDir", inDir), - slog.String("outDir", outDir), - slog.String("configFile", configFile), - slog.Bool("version", showVersion), - slog.Bool("dryRun", dryRun), - slog.Bool("debug", debug), + logger.Debug( + "gathered runtime info", + slog.String("revision", Revision), + slog.String("version", Version), + slog.String("go_version", GoVersion), + slog.String("git_version", Version), + slog.Time("build_date", BuildDate), ) - if showVersion { - fmt.Printf("pher %v\n", version) + logger.Debug("parsed flags", + slog.String("inDir", s.InputDir), + slog.String("outDir", s.OutputDir), + slog.String("configFile", s.ConfigFile), + slog.Bool("version", s.ShowVersion), + slog.Bool("dryRun", s.DryRun), + slog.Bool("debug", s.Debug), + ) + // show version and exit if that's the case + if s.ShowVersion { + fmt.Printf("pher %v\n", Version) return nil } - // This is pher's s - s := state.Init() - s.DryRun = dryRun - - // Sanitize input directory - inDir, err = filepath.Abs(inDir) - if err != nil { - return fmt.Errorf("absolute path: %w", err) - } + // sanitize paths + sanitize(&s, logger) - if err = checks.DirExist(outDir); err != nil { - return fmt.Errorf("output directory: %w", err) - } - - s.InDir = inDir - - logger.Debug("sanitized input directory", slog.String("path", inDir)) - - // Sanitize output directory - outDir, err = filepath.Abs(outDir) - if err != nil { - return fmt.Errorf("absolute path: %w", err) - } - - if err = checks.DirExist(outDir); err != nil { - return fmt.Errorf("output directory: %w", err) - } - - s.OutDir = outDir - - logger.Debug("sanitized output directory", slog.String("path", outDir)) - - // Sanitize configuration file - configFile, err = filepath.Abs(configFile) - if err != nil { - return fmt.Errorf("absolute path: %w", err) - } - - if fileExist, err := checks.FileExist(configFile); err != nil { - return fmt.Errorf("stat: %v", err) - } else if !fileExist { - return fmt.Errorf("missing: %s", configFile) + // create output directory + if err := createDir(s.OutputDir); err != nil { + return err } + logger.Debug("created output directory", slog.String("dir", s.OutputDir)) - logger.Debug("sanitized config file", slog.String("path", configFile)) - - // Read configuration - s.Config, err = config.Read(configFile) + // parse configuration + s.Config, err = config.Read(s.ConfigFile) if err != nil { return err } + logger.Debug("parsed configuration", slog.Any("config", s.Config)) - logger.Debug("read configuration", slog.Any("config", s.Config)) - - // Clean output directory + // clean output directory if !s.DryRun { - logger.Info("cleaning output directory") + exceptions := []string{relStaticOutputDir} - files, err := filepath.Glob(outDir + "/*") - if err != nil { - return fmt.Errorf("glob files: %w", err) - } - - if err = removeFiles(files); err != nil { - return fmt.Errorf("rm files: %w", err) + if err := cleanOutputDir(s.OutputDir, exceptions); err != nil { + return err } + logger.Info("cleaned output directory", slog.Any("exceptions", exceptions)) } else { - logger.Debug("dry run on: skipped cleaning output directory") + logger.Debug("dry run — skipped cleaning output directory") } - // Initiate templates - s.Templates = template.Must(template.ParseFS( - Templates, filepath.Join(templateDir, "*"))) + // initiate templates + initTemplates(&s) + logger.Debug("loaded and initialized templates") - logger.Debug("loaded templates") - - // Grab files and reorder so indexes are processed last - files, err := zglob.Glob(s.InDir + "/**/*.md") + // get source files from input directory + s.NodePaths, err = getNodePaths(s.InputDir, logger) if err != nil { - return fmt.Errorf("glob files: %w", err) - } - - files = filterHiddenFiles(inDir, files) - s.Files = reorderFiles(files) - - logger.Debug("finalized list of files to process", slog.Any("files", s.Files)) - - logger.Info("extracting metadata and file relations") - - // Update the state with various metadata - if err := extractExtras(&s, logger); err != nil { - return err - } - - logger.Info("creating file listings") - - // Update the state with file listings, like backlinks and similar entries - if err := makeFileListing(&s, logger); err != nil { return err } + logger.Debug("found source files", slog.Any("paths", s.NodePaths)) - // NOTE: The next three processes can run concurrently as they are - // independent from each other - - // Construct and render atom feeds - logger.Info("creating atom feed") - - feedGroup, _ := errgroup.WithContext(context.Background()) - feedGroup.Go(func() error { - atom, err := feed.Construct(&s, logger) - if err != nil { - return err - } - - return feed.Write(&s, atom) - }, - ) - - // Copy asset dirs/files over to output directory - logger.Info("syncing assets") - - moveGroup, _ := errgroup.WithContext(context.Background()) - moveGroup.Go(func() error { - if err := syncAssets(context.Background(), &s, logger); err != nil { - return err - } - - return nil - }, - ) - - // Create beautiful HTML - logger.Info("templating all files") - - renderGroup, _ := errgroup.WithContext(context.Background()) - renderGroup.Go(func() error { - return render.Render(context.Background(), &s, logger) - }) - - // Wait for all goroutines to finish - if err := feedGroup.Wait(); err != nil { + // TODO: refactor + // update the state with various metadata + if err := extractExtras(&s, logger); err != nil { return err } + logger.Info("extracted metadata and file relations") - if err := moveGroup.Wait(); err != nil { + // TODO: refactor + // update the state with file listings, like backlinks and similar entries + if err := populateNodePathLinks(&s, logger); err != nil { return err } + logger.Info("created file index") - if err := renderGroup.Wait(); err != nil { + // do the rest of our tasks concurrently + if err := runConcurrentJobs(context.Background(), &s, logger); err != nil { return err } - end := time.Since(start) - - logger.Info("done", slog.Duration("execution time", end), slog.Int("number of files", len(files))) + logger.Info( + "completed", + slog.Duration("execution time", end), + slog.Int("number of files", len(s.NodePaths)), + ) return nil } diff --git a/internal/cli/concurrent.go b/internal/cli/concurrent.go new file mode 100644 index 0000000..be3ff9d --- /dev/null +++ b/internal/cli/concurrent.go @@ -0,0 +1,84 @@ +package cli + +import ( + "context" + "log/slog" + + "github.com/mstcl/pher/v3/internal/feed" + "github.com/mstcl/pher/v3/internal/render" + "github.com/mstcl/pher/v3/internal/state" + "golang.org/x/sync/errgroup" +) + +// runConcurrentJobs executes three jobs of the rest of the program concurrently +// as they are independent of each other: +// 1. Create the atom feed +// 2. Copy assets to the output directory +// 3. Copy static files to the output directory +// 4. Render all source files to HTML to the output directory +func runConcurrentJobs(ctx context.Context, s *state.State, logger *slog.Logger) error { + // construct and render atom feeds + constructFeedGroup, _ := errgroup.WithContext(ctx) + constructFeedGroup.Go(func() error { + atom, err := feed.Construct(s, logger) + if err != nil { + return err + } + + return feed.Write(s, atom) + }, + ) + + logger.Info("created atom feed") + + // copy asset dirs/files over to output directory + copyUserAssetsGroup, _ := errgroup.WithContext(ctx) + copyUserAssetsGroup.Go(func() error { + if err := copyUserAssets(ctx, s, logger); err != nil { + return err + } + + return nil + }, + ) + + logger.Info("synced user assets") + + // copy static content to the output directory + copyStaticGroup, _ := errgroup.WithContext(ctx) + copyStaticGroup.Go(func() error { + if err := copyStatic(s, logger); err != nil { + return err + } + + return nil + }, + ) + logger.Info("copied static files") + + // render all markdown files + renderGroup, _ := errgroup.WithContext(ctx) + renderGroup.Go(func() error { + return render.Render(ctx, s, logger) + }) + logger.Info("templated all source files") + + // wait for all goroutines to finish + if err := constructFeedGroup.Wait(); err != nil { + return err + } + + if err := copyUserAssetsGroup.Wait(); err != nil { + return err + } + + if err := copyStaticGroup.Wait(); err != nil { + return err + } + + if err := renderGroup.Wait(); err != nil { + return err + } + + return nil +} diff --git a/internal/cli/extract.go b/internal/cli/extract.go index 7a9d724..0aaf867 100644 --- a/internal/cli/extract.go +++ b/internal/cli/extract.go @@ -8,11 +8,13 @@ import ( "sort" "strings" - "github.com/mstcl/pher/v2/internal/convert" - "github.com/mstcl/pher/v2/internal/listing" - "github.com/mstcl/pher/v2/internal/source" - "github.com/mstcl/pher/v2/internal/state" - "github.com/mstcl/pher/v2/internal/tag" + "github.com/mstcl/pher/v3/internal/assetpath" + "github.com/mstcl/pher/v3/internal/convert" + "github.com/mstcl/pher/v3/internal/nodepath" + "github.com/mstcl/pher/v3/internal/nodepathlink" + "github.com/mstcl/pher/v3/internal/source" + "github.com/mstcl/pher/v3/internal/state" + "github.com/mstcl/pher/v3/internal/tag" ) // Process files to build up the entry data for all files, the tags data, and @@ -27,15 +29,18 @@ func extractExtras(s *state.State, logger *slog.Logger) error { tagsCount := make(map[string]int) // tagsListing: tags listing - files with this tag (key: tag name) - tagsListing := make(map[string][]listing.Listing) + tagsListing := make(map[string][]nodepathlink.NodePathLink) // First loop, can do most things - for _, f := range s.Files { - child := logger.With(slog.String("filepath", f), slog.String("context", "extracting extras")) + for _, np := range s.NodePaths { + child := logger.With( + slog.Any("nodepath", np), + slog.String("context", "extracting extras"), + ) - entry := s.Entries[f] + entry := s.NodeMap[np] - file, err := os.Open(f) + file, err := os.Open(np.String()) if err != nil { return err } @@ -88,10 +93,10 @@ func extractExtras(s *state.State, logger *slog.Logger) error { child.Debug("extracted links", slog.Any("links", links)) // Resolve basic vars - path := filepath.Dir(f) - base := convert.FileBase(f) + path := filepath.Dir(np.String()) + base := np.Base() title := convert.Title(md.Title, base) - href := convert.Href(f, s.InDir, false) + href := np.Href(s.InputDir, false) isDir := base == "index" if s.Config.IsExt { @@ -103,7 +108,7 @@ func extractExtras(s *state.State, logger *slog.Logger) error { entry.Body = rendered.HTML entry.Href = href entry.ChromaCSS = rendered.ChromaCSS - s.Entries[f] = entry + s.NodeMap[np] = entry // Update assets from internal links for _, v := range links.InternalLinks { @@ -113,10 +118,10 @@ func extractExtras(s *state.State, logger *slog.Logger) error { return nil } - s.Assets[ref] = true + s.UserAssetMap[assetpath.AssetPath(ref)] = true } - child.Debug("updated assets with internal links paths", slog.Any("assets", s.Assets)) + child.Debug("updated assets with internal links paths", slog.Any("assets", s.UserAssetMap)) // Update assets and wikilinks from backlinks for _, v := range links.BackLinks { @@ -129,23 +134,23 @@ func extractExtras(s *state.State, logger *slog.Logger) error { // Process links with extensions as external files // like images/gifs if len(filepath.Ext(ref)) > 0 { - s.Assets[ref] = true + s.UserAssetMap[assetpath.AssetPath(ref)] = true } ref += ".md" // Save backlinks - linkedEntry := s.Entries[ref] + linkedEntry := s.NodeMap[nodepath.NodePath(ref)] linkedEntry.Backlinks = append( linkedEntry.Backlinks, - listing.Listing{ + nodepathlink.NodePathLink{ Href: href, Title: title, Description: entry.Metadata.Description, IsDir: isDir, }, ) - s.Entries[ref] = linkedEntry + s.NodeMap[nodepath.NodePath(ref)] = linkedEntry } child.Debug("updated assets and wiklinks from backlinks") @@ -156,7 +161,7 @@ func extractExtras(s *state.State, logger *slog.Logger) error { for _, v := range md.Tags { tagsCount[v] += 1 - tagsListing[v] = append(tagsListing[v], listing.Listing{ + tagsListing[v] = append(tagsListing[v], nodepathlink.NodePathLink{ Href: href, Title: title, Description: entry.Metadata.Description, @@ -164,7 +169,11 @@ func extractExtras(s *state.State, logger *slog.Logger) error { }) } - child.Debug("updated tags", slog.Any("tagsCount", tagsCount), slog.Any("tagsListing", tagsListing)) + child.Debug( + "updated tags", + slog.Any("tagsCount", tagsCount), + slog.Any("tagsListing", tagsListing), + ) } logger.Debug("proceeding to second loop") @@ -173,19 +182,22 @@ func extractExtras(s *state.State, logger *slog.Logger) error { // // NOTE: Entries that share tags are related // Hence dependent on tags listing (tl) - for _, f := range s.Files { - child := logger.With(slog.String("filepath", f), slog.String("context", "extracting extras")) + for _, np := range s.NodePaths { + child := logger.With( + slog.Any("nodepath", np), + slog.String("context", "extracting extras"), + ) - entry := s.Entries[f] + entry := s.NodeMap[np] if entry.Metadata.Draft || len(entry.Metadata.Tags) == 0 { continue } // listings: all related links - listings := []listing.Listing{} + listings := []nodepathlink.NodePathLink{} // relatedListings: unique related links - relatedListings := []listing.Listing{} + relatedListings := []nodepathlink.NodePathLink{} // Get all files with similar tags for _, t := range entry.Metadata.Tags { @@ -198,7 +210,7 @@ func extractExtras(s *state.State, logger *slog.Logger) error { for _, l := range listings { filename := strings.TrimSuffix(l.Href, filepath.Ext(l.Href)) - if filepath.Join(s.InDir, filename) == strings.TrimSuffix(f, ".md") { + if filepath.Join(s.InputDir, filename) == strings.TrimSuffix(np.String(), ".md") { continue } @@ -215,7 +227,7 @@ func extractExtras(s *state.State, logger *slog.Logger) error { child.Debug("extracted related links", slog.Any("relatedlinks", relatedListings)) // Update entry - s.Entries[f] = entry + s.NodeMap[np] = entry } // Transform maps of tags count and tags listing to give a sorted slice of tags. @@ -232,9 +244,9 @@ func extractExtras(s *state.State, logger *slog.Logger) error { for _, k := range keys { tags = append(tags, tag.Tag{Name: k, Count: tagsCount[k], Links: tagsListing[k]}) } - s.Tags = append(s.Tags, tags...) + s.NodeTags = append(s.NodeTags, tags...) - logger.Debug("extracted tags", slog.Any("tags", s.Tags)) + logger.Debug("extracted tags", slog.Any("tags", s.NodeTags)) return nil } diff --git a/internal/cli/fileops.go b/internal/cli/fileops.go index d72cf14..faa0df9 100644 --- a/internal/cli/fileops.go +++ b/internal/cli/fileops.go @@ -2,43 +2,100 @@ package cli import ( "context" + "errors" "fmt" + "io" + "io/fs" "log/slog" "os" "path/filepath" + "slices" - "github.com/mstcl/pher/v2/internal/convert" - "github.com/mstcl/pher/v2/internal/state" + "github.com/mattn/go-zglob" + "github.com/mstcl/pher/v3/internal/nodepath" + "github.com/mstcl/pher/v3/internal/state" "golang.org/x/sync/errgroup" - - "github.com/mstcl/pher/v2/internal/checks" ) -// Move all index.md from files to the end so they are processed last -func reorderFiles(files []string) []string { - var notIndex []string +func createDir(dir string) error { + if err := os.MkdirAll(dir, 0o755); err != nil { + return fmt.Errorf("os.MkdirAll %s: %w", dir, err) + } + + return nil +} + +// getNodePaths return the nodes we need to process by recursively glob for all +// markdown files, then run sanitizeNodeFiles() on them +func getNodePaths(inputDir string, logger *slog.Logger) ([]nodepath.NodePath, error) { + nodepathsRaw, err := zglob.Glob(filepath.Join(inputDir, "**", "*.md")) + if err != nil { + return nil, fmt.Errorf("glob files: %w", err) + } - var index []string + var nodepaths []nodepath.NodePath + for _, np := range nodepathsRaw { + nodepaths = append(nodepaths, nodepath.NodePath(np)) + } + + // sanitize files found + nodepaths = sanitizeNodePaths(nodepaths, logger) + logger.Debug("sanitized source files", slog.Any("paths", nodepathsRaw)) + + return nodepaths, nil +} + +// cleanOutput removes all files and directories in outputDir, +// except for the ones listed in the exceptions list. +// WARN: on error keep deleting everything, and report errors at the end +// this is to ensure a clean state. +func cleanOutputDir(outputDir string, exceptions []string) error { + entries, err := os.ReadDir(outputDir) + if err != nil { + return fmt.Errorf("os.ReadDir %s: %w", outputDir, err) + } - for _, i := range files { - base := convert.FileBase(i) - if base == "index" { - index = append(index, i) + var removeErrors []error + for _, entry := range entries { + // skip those in the exceptions + if slices.Contains(exceptions, entry.Name()) { continue } - notIndex = append(notIndex, i) + pathToRemove := filepath.Join(outputDir, entry.Name()) + if err := os.RemoveAll(pathToRemove); err != nil { + removeErrors = append( + removeErrors, + fmt.Errorf("os.RemoveAll %s: %w", pathToRemove, err), + ) + } + } + + if len(removeErrors) > 0 { + return errors.Join(removeErrors...) } - return append(notIndex, index...) + return nil } -// Delete files -func removeFiles(files []string) error { - for _, c := range files { - if err := os.RemoveAll(c); err != nil { - return fmt.Errorf("removing old output files: %w", err) - } +// copyFile copies inPath to outPath using ioReader and ioWriter +func copyFile(inPath string, outPath string, permission os.FileMode) error { + inFile, err := os.Open(inPath) + if err != nil { + return fmt.Errorf("os.Open %s: %w", inPath, err) + } + defer inFile.Close() + + outFile, err := os.OpenFile(outPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, permission) + if err != nil { + return fmt.Errorf("os.OpenFile %s: %w", outPath, err) + } + defer outFile.Close() + + // Copy the content using a stream + _, err = io.Copy(outFile, inFile) + if err != nil { + return fmt.Errorf("io.Copy %s to %s: %w", inPath, outPath, err) } return nil @@ -46,54 +103,99 @@ func removeFiles(files []string) error { // Move extra files like assets (images, fonts, css) over to output, preserving // the file structure. -func syncAssets(ctx context.Context, s *state.State, logger *slog.Logger) error { +func copyUserAssets(ctx context.Context, s *state.State, logger *slog.Logger) error { eg, _ := errgroup.WithContext(ctx) - for f := range s.Assets { - f := f - - child := logger.With(slog.String("filepath", f), slog.String("context", "copying asset")) + for assetPath := range s.UserAssetMap { + child := logger.With( + slog.Any("assetpath", assetPath), + slog.String("context", "copying asset"), + ) child.Debug("submitting goroutine") eg.Go(func() error { - // want our assets to go from inDir/a/b/c/image.png -> outDir/a/b/c/image.png - rel, _ := filepath.Rel(s.InDir, f) - path := filepath.Join(s.OutDir, rel) - - // Make dir on filesystem - if err := checks.DirExist(filepath.Dir(path)); err != nil { - return fmt.Errorf("make directory: %w", err) - } - - // Copy from f to out - b, err := os.ReadFile(f) - if err != nil { - return fmt.Errorf("read file: %w", err) - } - - if err = os.WriteFile(path, b, 0o644); err != nil { - return fmt.Errorf("write file: %w", err) + // NOTE: want our assets to go from inDir/a/b/c/image.png -> outDir/a/b/c/image.png + relToInputDir, _ := filepath.Rel(s.InputDir, assetPath.String()) + outputPath := filepath.Join(s.OutputDir, relToInputDir) + parentOutputDir := filepath.Dir(outputPath) + + // Make equivalent directory in output directory + if err := os.MkdirAll(parentOutputDir, 0o755); err != nil { + return fmt.Errorf("os.MkdirAll %s: %v", parentOutputDir, err) } - return nil + // Copy file to target directory + return copyFile(assetPath.String(), outputPath, 0o644) }) } return eg.Wait() } -// Filter hidden files from files -func filterHiddenFiles(inDir string, files []string) []string { - newFiles := []string{} +// copyStatic +func copyStatic(s *state.State, logger *slog.Logger) error { + outputDir := filepath.Join(s.OutputDir, relStaticOutputDir) - for _, f := range files { - if rel, _ := filepath.Rel(inDir, f); rel[0] == 46 { - continue + // make static directory in output directory + if err := os.MkdirAll(outputDir, 0o755); err != nil { + return fmt.Errorf("os.MkdirAll %s: %w", outputDir, err) + } + + logger.Debug("created static output directory", slog.String("dir", outputDir)) + + staticFS, err := fs.Sub(EmbedFS, relStaticDir) + if err != nil { + return fmt.Errorf("create subfilesystem %s: %w", relStaticDir, err) + } + + logger.Debug("created static subfilesystem", slog.String("dir", relStaticDir)) + + // walk through all files and directories in the `staticfs`. + // starting at the root of the sub-filesystem. + if err := fs.WalkDir(staticFS, ".", func(inputPath string, d fs.DirEntry, err error) error { + if err != nil { + return err } - newFiles = append(newFiles, f) + // skip directories and only process files + if d.IsDir() { + return nil + } + + // construct the destination path for the file + outputPath := filepath.Join(outputDir, inputPath) + parentOutputDir := filepath.Dir(outputPath) + + // create the destination directory if it doesn't exist + if err := os.MkdirAll(parentOutputDir, 0o755); err != nil { + return fmt.Errorf("os.MkdirAll %s: %w", parentOutputDir, err) + } + + // open the input file from fs + inputFile, err := staticFS.Open(inputPath) + if err != nil { + return err + } + defer inputFile.Close() + + // create output file and copy content from inputFile to outputFile + outputFile, err := os.Create(outputPath) + if err != nil { + return err + } + defer outputFile.Close() + + if _, err := io.Copy(outputFile, inputFile); err != nil { + return err + } + + return nil + }); err != nil { + return fmt.Errorf("fs.WalkDir: %w", err) } - return newFiles + logger.Debug("walked static subfilesystem", slog.String("outputDir", outputDir)) + + return nil } diff --git a/internal/cli/flags.go b/internal/cli/flags.go new file mode 100644 index 0000000..6d7fa66 --- /dev/null +++ b/internal/cli/flags.go @@ -0,0 +1,19 @@ +package cli + +import ( + "flag" + + "github.com/mstcl/pher/v3/internal/state" +) + +func parseFlags(s *state.State) { + flag.BoolVar(&s.ShowVersion, "v", false, "Show version and exit") + flag.BoolVar(&s.DryRun, "d", false, "Don't render (dry run)") + flag.BoolVar(&s.Debug, "debug", false, "Verbose (debug) mode") + + flag.StringVar(&s.ConfigFile, "c", "config.yaml", "Path to config file") + flag.StringVar(&s.InputDir, "i", ".", "Input directory") + flag.StringVar(&s.OutputDir, "o", "_site", "Output directory") + + flag.Parse() +} diff --git a/internal/cli/listentry.go b/internal/cli/listentry.go new file mode 100644 index 0000000..f2d7892 --- /dev/null +++ b/internal/cli/listentry.go @@ -0,0 +1,318 @@ +package cli + +import ( + "fmt" + "html/template" + "log/slog" + "os" + "path/filepath" + "strings" + + "github.com/mattn/go-zglob" + "github.com/mstcl/pher/v3/internal/convert" + "github.com/mstcl/pher/v3/internal/metadata" + "github.com/mstcl/pher/v3/internal/nodepath" + "github.com/mstcl/pher/v3/internal/nodepathlink" + "github.com/mstcl/pher/v3/internal/state" +) + +type populateNodePathLinksHelperInput struct { + parentNodePath nodepath.NodePath + childrenNodePath []nodepath.NodePath +} + +// populateNodePathLinks finds all nodegroups. For each of them, find the +// children nodepaths (can either be nodes or nodegroups), and calls +// populateNodesListEntryHelper() on children nodepaths to populate the +// children's nodepath links +func populateNodePathLinks(s *state.State, logger *slog.Logger) error { + s.NodegroupWithoutIndexMap = make(map[nodepath.NodePath]bool) + + nodepathsRaw, err := zglob.Glob(filepath.Join(s.InputDir, "**", "*")) + if err != nil { + return err + } + + var nodepaths []nodepath.NodePath + for _, np := range nodepathsRaw { + nodepaths = append(nodepaths, nodepath.NodePath(np)) + } + + // add the root "." as well + nodepaths = append(nodepaths, nodepath.NodePath(s.InputDir)) + + logger.Debug("found all nodepaths", slog.Any("nodepaths", nodepaths)) + + // Go through and drop all nodepaths that aren't nodegroups Glob nodegroups + // for further nodepaths and call the helper function to populate their + // NodePathLink slice + for _, np := range nodepaths { + childNodePath := logger.With( + slog.Any("nodepath", np), + slog.String("context", "populateNodePathLinks"), + ) + + isNodegroup, err := np.IsNodegroup() + if err != nil { + return err + } + + if !isNodegroup { + continue + } + + // Find immediate children of the nodepath + childrenRaw, err := filepath.Glob(filepath.Join(np.String(), "*")) + if err != nil { + return err + } + + childNodePath.Debug("found children files", slog.Any("files", childrenRaw)) + + var children []nodepath.NodePath + for _, child := range childrenRaw { + children = append(children, nodepath.NodePath(child)) + } + + // Run the helper on the parent and its chidlren + if err := populateNodePathLinksHelper(s, &populateNodePathLinksHelperInput{ + parentNodePath: np, + childrenNodePath: children, + }, logger); err != nil { + return err + } + } + + // Add index files to NodegroupWithoutIndexMap + // TODO: refactor this + for np := range s.NodegroupWithoutIndexMap { + entry := s.NodeMap[np] + + // add index to our files to render + s.NodePaths = append(s.NodePaths, np) + md := metadata.Default() + + // we have inDir/a/b/c/index.md + // want to extract c + // i.e. title is the folder name + + // inDir/a/b/c/index.md -> a/b/c/index.md + rel, _ := filepath.Rel(s.InputDir, np.String()) + + // a/b/c/index.md -> a/b/c -> a/b, c + _, dir := filepath.Split(filepath.Dir(rel)) + + // title is c + md.Title = dir + + // update Metadata + entry.Metadata = *md + + // update record + s.NodeMap[np] = entry + } + + return nil +} + +// populateNodePathLinksHelper loops through all immediate children inside the +// current parent (i.parentNodePath) to populate and return NodePathLinksMap, +// NodegroupWithoutIndexMap, and SkippedNodePathMap. +func populateNodePathLinksHelper( + s *state.State, + i *populateNodePathLinksHelperInput, + logger *slog.Logger, +) error { + // this is the nodegroup index path, we expect it to be at /path/to/nodegroup/index.md + nodegroupIndexPath := nodepath.NodePath(filepath.Join(i.parentNodePath.String(), "index.md")) + + // is the parent nodegroup a log type? If so cache this because we will use + // it later on for further logic + isLog := s.NodeMap[nodegroupIndexPath].Metadata.Layout == "log" + + // for each child, make some decisions + for _, np := range i.childrenNodePath { + childLogger := logger.With( + slog.Any("filepath", np), + slog.String("context", "child listing"), + ) + + // stat files/directories + info, err := os.Stat(np.String()) + if err != nil { + return err + } + + IsDir := info.Mode().IsDir() + + // begin filtering invalid/unwated files/directories and/or fast + // failing unexpected behaviour + + // throw error if parent's view is log type but child is subdirectory + if IsDir && isLog { + childLogger.Error("found a directory in log parent -- this is unexpected") + + return err + } + + // if a nodegroup, skip if it's empty + if IsDir { + // check if the nodepath is actually a node group + nodegroupHasChildren, err := np.HasChildren() + if err != nil { + return err + } + + if !nodegroupHasChildren { + childLogger.Debug("skipping empty directory found") + + continue + } + } + + // Skip hidden files/directories + relativePath, _ := filepath.Rel(s.InputDir, np.String()) + if strings.HasPrefix(relativePath, ".") { + childLogger.Debug("skipping hidden file/directory") + + continue + } + + // Skip non-markdown files + fileExtension := filepath.Ext(np.String()) + if !IsDir && fileExtension != ".md" { + childLogger.Debug("skipping non-markdown file") + + continue + } + + // Skip index files, unlisted ones + if np.Base() == "index" { + childLogger.Debug("skipping index file") + + continue + } + + if s.NodeMap[np].Metadata.Unlisted { + childLogger.Debug("skipping unlisted file") + + continue + } + + // checks complete, now we consider only files that are valid + + // don't render these files later + s.SkippedNodePathMap[np] = isLog + + // append to missing index if index doesn't exist for a directory + if IsDir { + indexFile := filepath.Join(np.String(), "index.md") + + _, err := os.Stat(indexFile) + if os.IsNotExist(err) { + s.NodegroupWithoutIndexMap[np+"/index.md"] = true // TODO: change the behaviour so we don't have to append the /index.md as the key + + childLogger.Debug("index doesn't exist, added to missing index state") + } else if err != nil { + return fmt.Errorf("stat %s: %w", s.ConfigFile, err) + } + } + + // prepare the link + l := nodepathlink.NodePathLink{} + + // grab href target, different for file vs. dir + l.IsDir = IsDir + + // construct the rest of the NodePathLink fields. Additionally add in + // relevant rendering data fields like html body and tags for parents + // with log view configured. + // + // we also update np in place as it will be used as the key in further + // maps, so that if it's a parent the key should have index.md at the + // end + if IsDir { + // isolate the directory name, this is the title and href + npName, err := filepath.Rel(i.parentNodePath.String(), np.String()) + if err != nil { + return err + } + + l.Title = npName + + if s.Config.IsExt { + l.Href = filepath.Join(npName, "index.html") + } else { + l.Href = npName + } + + // switch nodegroup key to index for title & description + np = nodepath.NodePath(filepath.Join(np.String(), "index.md")) + childLogger.Debug( + "replaced nodegroup key with index path", + slog.String("np", np.String()), + ) + } else { + npName := np.Href(i.parentNodePath.String(), false) + + if s.Config.IsExt { + l.Href = npName + ".html" + } else { + l.Href = npName + } + } + + // grab nodepath title + // if metadata has title -> use that. + // if not -> use filename only if nodepath is not a directory + // as directory title is already set above + title := s.NodeMap[np].Metadata.Title + if len(title) > 0 { + l.Title = title + } else if !l.IsDir { + l.Title = np.Base() + } + + // grab nodepath description + l.Description = s.NodeMap[np].Metadata.Description + + // handle log nodegroup logic + if isLog { + l.Body = template.HTML(s.NodeMap[np].Body) + + // if date is present convert it + date := s.NodeMap[np].Metadata.Date + if len(date) > 0 { + l.Date, l.MachineDate, err = convert.Date(date) + if err != nil { + return err + } + } + + // if dateUpdated is present convert it + dateUpdated := s.NodeMap[np].Metadata.DateUpdated + if len(dateUpdated) > 0 { + l.DateUpdated, l.MachineDateUpdated, err = convert.Date(dateUpdated) + if err != nil { + return err + } + } + + // set link tags + l.Tags = s.NodeMap[np].Metadata.Tags + } + + // if node is pinned we prepend it to the links map slice value, else we append it + if s.NodeMap[np].Metadata.Pinned { + s.NodePathLinksMap[nodegroupIndexPath] = append( + []nodepathlink.NodePathLink{l}, + s.NodePathLinksMap[nodegroupIndexPath]...) + + continue + } else { + s.NodePathLinksMap[nodegroupIndexPath] = append(s.NodePathLinksMap[nodegroupIndexPath], l) + } + } + + return nil +} diff --git a/internal/cli/logger.go b/internal/cli/logger.go new file mode 100644 index 0000000..4509fa4 --- /dev/null +++ b/internal/cli/logger.go @@ -0,0 +1,24 @@ +package cli + +import ( + "log/slog" + "os" + "time" + + "github.com/lmittmann/tint" +) + +func createLogger(debug bool) *slog.Logger { + var lvl slog.Level + + if debug { + lvl = slog.LevelDebug + } else { + lvl = slog.LevelInfo + } + + return slog.New(tint.NewHandler(os.Stderr, &tint.Options{ + Level: lvl, + TimeFormat: time.Kitchen, + })) +} diff --git a/internal/cli/makelisting.go b/internal/cli/makelisting.go deleted file mode 100644 index b32bd6b..0000000 --- a/internal/cli/makelisting.go +++ /dev/null @@ -1,273 +0,0 @@ -package cli - -import ( - "html/template" - "log/slog" - "os" - "path/filepath" - - "github.com/mattn/go-zglob" - "github.com/mstcl/pher/v2/internal/checks" - "github.com/mstcl/pher/v2/internal/convert" - "github.com/mstcl/pher/v2/internal/listing" - "github.com/mstcl/pher/v2/internal/metadata" - "github.com/mstcl/pher/v2/internal/state" -) - -// Get all directories, and call listChildren() to populate the files within. -// -// * listing: listing entries of parents. -// -// * missing: bool map of parent index paths that are missing. -// -// * skip: bool map of files that should not be rendered (because its parents -// is displaying a log) -func makeFileListing(s *state.State, logger *slog.Logger) error { - // Initialize missing map - s.Missing = make(map[string]bool) - - files, err := zglob.Glob(s.InDir + "/**/*") - if err != nil { - return err - } - - files = append(files, s.InDir) - - logger.Debug("found files to process listing", slog.Any("files", files)) - - // Go through everything that aren't files - // Glob those directories for both files and directories - // These are PARENTS with listings - for _, f := range files { - child := logger.With(slog.String("filepath", f), slog.String("context", "file listing")) - - // Stat files/directories - info, err := os.Stat(f) - if err != nil { - return err - } - - // Only process directories - if info.Mode().IsRegular() { - continue - } - - // Glob under directory - children, err := filepath.Glob(f + "/*") - if err != nil { - return err - } - - child.Debug("found children files", slog.Any("files", children)) - - if err := makeFileListingHelper(s, &helperInput{ - parentDir: f, - files: children, - }, logger); err != nil { - return err - } - } - - // Update files - for f := range s.Missing { - entry := s.Entries[f] - - // add index to our files to render - s.Files = append(s.Files, f) - md := metadata.Default() - - // we have inDir/a/b/c/index.md - // want to extract c - // i.e. title is the folder name - - // inDir/a/b/c/index.md -> a/b/c/index.md - rel, _ := filepath.Rel(s.InDir, f) - - // a/b/c/index.md -> a/b/c -> a/b, c - _, dir := filepath.Split(filepath.Dir(rel)) - - // title is c - md.Title = dir - - // update Metadata - entry.Metadata = *md - - // update record - s.Entries[f] = entry - } - - return nil -} - -type helperInput struct { - parentDir string - files []string -} - -// Sub-function to loop through depth 1 children inside the current parent -// (parentDir) to populate and return the listing map, the missing map, and the -// skip map. Additional calls constructListingEntry() to make individual listing -// entry. -func makeFileListingHelper( - s *state.State, - i *helperInput, - logger *slog.Logger, -) error { - // Whether to render children - // Use source file as key for consistency - dirIndex := filepath.Join(i.parentDir, "index.md") - isLog := s.Entries[dirIndex].Metadata.Layout == "log" - - for _, f := range i.files { - child := logger.With(slog.String("filepath", f), slog.String("context", "child listing")) - - // Stat files/directories - info, err := os.Stat(f) - if err != nil { - return err - } - - IsDir := info.Mode().IsDir() - - // Skip hidden files - if rel, _ := filepath.Rel(s.InDir, f); rel[0] == 46 { - child.Debug("skipped hidden file") - - continue - } - - // Skip non-markdon files - if !IsDir && filepath.Ext(f) != ".md" { - child.Debug("skipped non markdown file") - - continue - } - - // Skip index files, unlisted ones - if convert.FileBase(f) == "index" || s.Entries[f].Metadata.Unlisted { - child.Debug("skip index files and unlisted files") - - continue - } - - // Don't render these files later - s.Skip[f] = isLog - - // Throw error if parent's view is Log but child is subdirectory - if IsDir && isLog { - child.Error("found a directory in log parent - this is unexpected") - - return err - } - - // Skip directories without any entry (markdown files) - entryPresent, err := checks.EntryPresent(f) - if err != nil { - return err - } - - if IsDir && !entryPresent { - child.Debug("empty directory found - skipping") - - continue - } - - // Append to missing if index doesn't exist - if IsDir { - indexExists, err := checks.FileExist(f + "/index.md") - if err != nil { - return err - } - - if !indexExists { - child.Debug("index doesn't exist, adding to missing state") - - s.Missing[f+"/index.md"] = true - } - } - - // Prepare the listing - l := listing.Listing{} - - // Grab href target, different for file vs. dir - l.IsDir = IsDir - - // Construct the rest of the listing entry fields. Additionally add in - // relevant rendering data fields like html body and tags for parents - // with log view configured. - if l.IsDir { - target, err := filepath.Rel(i.parentDir, f) - if err != nil { - return err - } - - l.Title = target - - if s.Config.IsExt { - target += "/index.html" - } - - l.Href = target - // Switch target to index for title & description - f = filepath.Join(f, "index.md") - } else { - target := convert.Href(f, i.parentDir, false) - if s.Config.IsExt { - target += ".html" - } - - l.Href = target - } - - // Grab titles and description. - // If metadata has title -> use that. - // If not -> use filename only if entry is not a directory - title := s.Entries[f].Metadata.Title - if len(title) > 0 { - l.Title = title - } else if !l.IsDir { - l.Title = convert.FileBase(f) - } - - l.Description = s.Entries[f].Metadata.Description - - // Log entries for log layout - - if isLog { - l.Body = template.HTML(s.Entries[f].Body) - - date := s.Entries[f].Metadata.Date - if len(date) > 0 { - l.Date, l.MachineDate, err = convert.Date(date) - if err != nil { - return err - } - } - - dateUpdated := s.Entries[f].Metadata.DateUpdated - if len(dateUpdated) > 0 { - l.DateUpdated, l.MachineDateUpdated, err = convert.Date(dateUpdated) - if err != nil { - return err - } - } - - l.Tags = s.Entries[f].Metadata.Tags - } - - // Now we act on the index files - if IsDir { - f += "/index.md" - } - - // Append to listing map - if s.Entries[f].Metadata.Pinned { - s.Listings[dirIndex] = append([]listing.Listing{l}, s.Listings[dirIndex]...) - continue - } - - s.Listings[dirIndex] = append(s.Listings[dirIndex], l) - } - - return nil -} diff --git a/internal/cli/runtime.go b/internal/cli/runtime.go new file mode 100644 index 0000000..49c894a --- /dev/null +++ b/internal/cli/runtime.go @@ -0,0 +1,42 @@ +package cli + +import ( + "runtime" + "runtime/debug" + "time" +) + +var ( + Version = "unknown" + GoVersion = runtime.Version() + Revision = "unknown" + BuildDate time.Time + DirtyBuild = true +) + +// initRuntimeInfo grabs package info +// stolen from https://www.piotrbelina.com/blog/go-build-info-debug-readbuildinfo-ldflags/ +func initRuntimeInfo() { + info, ok := debug.ReadBuildInfo() + if !ok { + return + } + + if info.Main.Version != "" { + Version = info.Main.Version + } + + for _, kv := range info.Settings { + if kv.Value == "" { + continue + } + switch kv.Key { + case "vcs.revision": + Revision = kv.Value + case "vcs.time": + BuildDate, _ = time.Parse(time.RFC3339, kv.Value) + case "vcs.modified": + DirtyBuild = kv.Value == "true" + } + } +} diff --git a/internal/cli/sanitize.go b/internal/cli/sanitize.go new file mode 100644 index 0000000..dde80d3 --- /dev/null +++ b/internal/cli/sanitize.go @@ -0,0 +1,109 @@ +package cli + +import ( + "fmt" + "log/slog" + "os" + "path/filepath" + "strings" + + "github.com/mstcl/pher/v3/internal/nodepath" + "github.com/mstcl/pher/v3/internal/state" +) + +func sanitize(s *state.State, logger *slog.Logger) error { + var err error + + // Sanitize configuration file + s.ConfigFile, err = filepath.Abs(s.ConfigFile) + if err != nil { + return fmt.Errorf("absolute path: %w", err) + } + + // Check whether configuration file exists + _, err = os.Stat(s.ConfigFile) + if os.IsNotExist(err) { + return fmt.Errorf("missing: %s", s.ConfigFile) + } else if err != nil { + return fmt.Errorf("os.Stat %s: %w", s.ConfigFile, err) + } + + logger.Debug("sanitized config file", slog.String("path", s.ConfigFile)) + + // Sanitize input directory + s.InputDir, err = filepath.Abs(s.InputDir) + if err != nil { + return fmt.Errorf("filepath.Abs: %w", err) + } + + logger.Debug("sanitized input directory", slog.String("path", s.InputDir)) + + // Sanitize output directory + s.OutputDir, err = filepath.Abs(s.OutputDir) + if err != nil { + return fmt.Errorf("filepath.Abs: %w", err) + } + + logger.Debug("sanitized output directory", slog.String("path", s.OutputDir)) + + return nil +} + +// reorderNodeFiles resorts nodes slice so that all group index are moved to the +// end so they are processed last +func reorderNodeFiles(nodepaths []nodepath.NodePath) []nodepath.NodePath { + var notIndex []nodepath.NodePath + var index []nodepath.NodePath + + for _, i := range nodepaths { + base := i.Base() + if base == "index" { + index = append(index, i) + continue + } + + notIndex = append(notIndex, i) + } + + return append(notIndex, index...) +} + +// dropHiddenFiles drops files where any path component is hidden (starts with a dot). +func dropHiddenFiles(nodepaths []nodepath.NodePath) []nodepath.NodePath { + var newFiles []nodepath.NodePath + + for _, np := range nodepaths { + if !isPathHidden(np.String()) { + newFiles = append(newFiles, np) + } + } + + return newFiles +} + +// isPathHidden checks if any component of a path starts with a dot. +func isPathHidden(p string) bool { + // split the path into components. + parts := strings.Split(p, string(filepath.Separator)) + + // iterate through each part and check for a leading dot. + for _, part := range parts { + if strings.HasPrefix(part, ".") && part != "." && part != ".." { + return true + } + } + + return false +} + +func sanitizeNodePaths(nodepaths []nodepath.NodePath, logger *slog.Logger) []nodepath.NodePath { + // sanitize by removing all hidden files + nodepaths = dropHiddenFiles(nodepaths) + logger.Debug("dropped hidden files") + + // reorder the list so indexes are processed last + nodepaths = reorderNodeFiles(nodepaths) + logger.Debug("finalized list of files to process") + + return nodepaths +} diff --git a/internal/cli/template.go b/internal/cli/template.go new file mode 100644 index 0000000..145bb5f --- /dev/null +++ b/internal/cli/template.go @@ -0,0 +1,25 @@ +package cli + +import ( + "embed" + "html/template" + "path" + "path/filepath" + + "github.com/mstcl/pher/v3/internal/state" +) + +var EmbedFS embed.FS + +func initTemplates(s *state.State) { + funcMap := getTemplateFuncMap() + tmpl := template.New("main") + tmpl = tmpl.Funcs(funcMap) + s.Templates = template.Must(tmpl.ParseFS(EmbedFS, filepath.Join(relTemplateDir, "*"))) +} + +func getTemplateFuncMap() template.FuncMap { + return template.FuncMap{ + "joinPath": path.Join, + } +} diff --git a/internal/convert/convert.go b/internal/convert/convert.go index 42b1b5d..07316f5 100644 --- a/internal/convert/convert.go +++ b/internal/convert/convert.go @@ -5,26 +5,11 @@ import ( "path/filepath" "strings" "time" -) - -// Return the href -// inDir/a/b/c/file.md -> a/b/c/file -func Href(f string, inDir string, prefixSlash bool) string { - // inDir/a/b/c/file.md -> a/b/c/file.md - rel, _ := filepath.Rel(inDir, f) - - // a/b/c/file.md -> a/b/c/file - href := strings.TrimSuffix(rel, filepath.Ext(rel)) - - // a/b/c/file -> /a/b/c/file (for web rooting) - if prefixSlash { - href = "/" + href - } - return href -} + "github.com/mstcl/pher/v3/internal/nodepath" +) -// Resolve the date d from format YYYY-MM-DD +// Date function resolves the date d (format YYYY-MM-DD) // Returns a pretty date and a machine date func Date(date string) (string, string, error) { if len(date) == 0 { @@ -39,14 +24,15 @@ func Date(date string) (string, string, error) { return dateTime.Format("02 Jan 2006"), dateTime.Format(time.RFC3339), nil } -// If link is "/a/b/c/file.md" +// NavCrumbs returns navigation components +// If link is "/a/b/c/file.md", then: // // crumbsTitle: {"a", "b", "c"} // // crumbsLink: {"a/index.html", "a/b/index.html", "a/b/c/index.html"} -func NavCrumbs(f string, inDir string, isExt bool) ([]string, []string) { +func NavCrumbs(np nodepath.NodePath, inDir string, isExt bool) ([]string, []string) { // inDir/a/b/c/file.md -> a/b/c/file.md - rel, _ := filepath.Rel(inDir, f) + rel, _ := filepath.Rel(inDir, np.String()) // a/b/c/file.md -> {a, b, c, file.md} crumbsTitle := strings.Split(rel, "/") @@ -71,7 +57,7 @@ func NavCrumbs(f string, inDir string, isExt bool) ([]string, []string) { return crumbsTitle[:len(crumbsTitle)-1], crumbsLink } -// Return title mt else fn +// Title returns title from metadata else from filename func Title(metadataTitle string, filename string) string { var title string if len(metadataTitle) > 0 { @@ -82,9 +68,3 @@ func Title(metadataTitle string, filename string) string { return title } - -// Given /path/to/filename.ext, return filename -func FileBase(f string) string { - fn := filepath.Base(f) - return strings.TrimSuffix(fn, filepath.Ext(fn)) -} diff --git a/internal/convert/convert_test.go b/internal/convert/convert_test.go index a72a63b..60ee7a3 100644 --- a/internal/convert/convert_test.go +++ b/internal/convert/convert_test.go @@ -1,115 +1,109 @@ package convert -import ( - "fmt" - "slices" - "testing" -) +// func TestHref(t *testing.T) { +// tests := []struct { +// f string +// inDir string +// want string +// }{ +// {"/x/y/z/a/b/c/d.md", "/x/y/z", "a/b/c/d"}, +// {"/x/y/z/d.md", "/x/y/z", "d"}, +// {"/a/b/c/d.md", "/", "a/b/c/d"}, +// {"/d.md", "/", "d"}, +// } +// +// for _, tt := range tests { +// testname := fmt.Sprintf("%s,%s", tt.f, tt.want) +// t.Run(testname, func(t *testing.T) { +// ans := Href(tt.f, tt.inDir, false) +// if ans != tt.want { +// t.Errorf("got %s, want %s", ans, tt.want) +// } +// }) +// } +// } -func TestHref(t *testing.T) { - tests := []struct { - f string - inDir string - want string - }{ - {"/x/y/z/a/b/c/d.md", "/x/y/z", "a/b/c/d"}, - {"/x/y/z/d.md", "/x/y/z", "d"}, - {"/a/b/c/d.md", "/", "a/b/c/d"}, - {"/d.md", "/", "d"}, - } +// func TestNavCrumbs(t *testing.T) { +// tests := []struct { +// f string +// inDir string +// crumbs []string +// crumbLinks []string +// }{ +// { +// "/x/y/z/file.md", +// "/x/y/z", +// []string{}, +// []string{}, +// }, +// { +// "/a/b/c/file.md", +// "/", +// []string{"a", "b", "c"}, +// []string{"a/index.html", "a/b/index.html", "a/b/c/index.html"}, +// }, +// { +// "/x/y/z/a/b/c/file.md", +// "/x/y/z", +// []string{"a", "b", "c"}, +// []string{"a/index.html", "a/b/index.html", "a/b/c/index.html"}, +// }, +// } +// +// for _, tt := range tests { +// testname := fmt.Sprint(tt.f) +// t.Run(testname, func(t *testing.T) { +// crumbs, crumbLinks := NavCrumbs(tt.f, tt.inDir, true) +// if !slices.Equal(crumbs, tt.crumbs) { +// t.Errorf("got %s, want %s", crumbs, tt.crumbs) +// } +// +// if !slices.Equal(crumbLinks, tt.crumbLinks) { +// t.Errorf("got %s, want %s", crumbLinks, tt.crumbLinks) +// } +// }) +// } +// } +// +// func TestTitle(t *testing.T) { +// tests := []struct { +// mt, fn string +// want string +// }{ +// {"", "a", "a"}, +// {"a", "b", "a"}, +// {"a", "", "a"}, +// } +// +// for _, tt := range tests { +// testname := fmt.Sprintf("%s,%s,%s", tt.mt, tt.fn, tt.want) +// t.Run(testname, func(t *testing.T) { +// ans := Title(tt.mt, tt.fn) +// if ans != tt.want { +// t.Errorf("got %s, want %s", ans, tt.want) +// } +// }) +// } +// } - for _, tt := range tests { - testname := fmt.Sprintf("%s,%s", tt.f, tt.want) - t.Run(testname, func(t *testing.T) { - ans := Href(tt.f, tt.inDir, false) - if ans != tt.want { - t.Errorf("got %s, want %s", ans, tt.want) - } - }) - } -} - -func TestNavCrumbs(t *testing.T) { - tests := []struct { - f string - inDir string - crumbs []string - crumbLinks []string - }{ - { - "/x/y/z/file.md", - "/x/y/z", - []string{}, - []string{}, - }, - { - "/a/b/c/file.md", - "/", - []string{"a", "b", "c"}, - []string{"a/index.html", "a/b/index.html", "a/b/c/index.html"}, - }, - { - "/x/y/z/a/b/c/file.md", - "/x/y/z", - []string{"a", "b", "c"}, - []string{"a/index.html", "a/b/index.html", "a/b/c/index.html"}, - }, - } - - for _, tt := range tests { - testname := fmt.Sprint(tt.f) - t.Run(testname, func(t *testing.T) { - crumbs, crumbLinks := NavCrumbs(tt.f, tt.inDir, true) - if !slices.Equal(crumbs, tt.crumbs) { - t.Errorf("got %s, want %s", crumbs, tt.crumbs) - } - - if !slices.Equal(crumbLinks, tt.crumbLinks) { - t.Errorf("got %s, want %s", crumbLinks, tt.crumbLinks) - } - }) - } -} - -func TestTitle(t *testing.T) { - tests := []struct { - mt, fn string - want string - }{ - {"", "a", "a"}, - {"a", "b", "a"}, - {"a", "", "a"}, - } - - for _, tt := range tests { - testname := fmt.Sprintf("%s,%s,%s", tt.mt, tt.fn, tt.want) - t.Run(testname, func(t *testing.T) { - ans := Title(tt.mt, tt.fn) - if ans != tt.want { - t.Errorf("got %s, want %s", ans, tt.want) - } - }) - } -} - -func TestFileBase(t *testing.T) { - tests := []struct { - f string - want string - }{ - {"a/b/c/d.md", "d"}, - {"/a/b/c/d.md", "d"}, - {"/a/b/c/d", "d"}, - {"/a/b/c/d/", "d"}, - } - - for _, tt := range tests { - testname := fmt.Sprintf("%s,%s", tt.f, tt.want) - t.Run(testname, func(t *testing.T) { - ans := FileBase(tt.f) - if ans != tt.want { - t.Errorf("got %s, want %s", ans, tt.want) - } - }) - } -} +// func TestFileBase(t *testing.T) { +// tests := []struct { +// f string +// want string +// }{ +// {"a/b/c/d.md", "d"}, +// {"/a/b/c/d.md", "d"}, +// {"/a/b/c/d", "d"}, +// {"/a/b/c/d/", "d"}, +// } +// +// for _, tt := range tests { +// testname := fmt.Sprintf("%s,%s", tt.f, tt.want) +// t.Run(testname, func(t *testing.T) { +// ans := FileBase(tt.f) +// if ans != tt.want { +// t.Errorf("got %s, want %s", ans, tt.want) +// } +// }) +// } +// } diff --git a/internal/entry/entry.go b/internal/entry/entry.go deleted file mode 100644 index 09717b2..0000000 --- a/internal/entry/entry.go +++ /dev/null @@ -1,18 +0,0 @@ -package entry - -import ( - "github.com/mstcl/pher/v2/internal/listing" - "github.com/mstcl/pher/v2/internal/metadata" -) - -// An entry's data, containing: the metadata, the html body, the backlinks -// (entries that mention this entry) the related links (other entries that -// share tags), and the href. -type Entry struct { - Href string - Backlinks []listing.Listing - Relatedlinks []listing.Listing - Body []byte - ChromaCSS []byte - Metadata metadata.Metadata -} diff --git a/internal/feed/feed.go b/internal/feed/feed.go index 67d6db4..f1b1b04 100644 --- a/internal/feed/feed.go +++ b/internal/feed/feed.go @@ -6,7 +6,7 @@ import ( "os" "time" - "github.com/mstcl/pher/v2/internal/state" + "github.com/mstcl/pher/v3/internal/state" ) func Construct(s *state.State, logger *slog.Logger) (string, error) { @@ -23,7 +23,7 @@ func Construct(s *state.State, logger *slog.Logger) (string, error) { feed.Items = []*Item{} - for _, v := range s.Entries { + for _, v := range s.NodeMap { child := logger.With(slog.String("href", v.Href), slog.String("context", "atom feed")) md := v.Metadata @@ -48,7 +48,7 @@ func Construct(s *state.State, logger *slog.Logger) (string, error) { feed.Items = append(feed.Items, entry) - child.Debug("Atom entry created") + child.Debug("atom entry created") } atom, err := feed.ToAtom() @@ -66,7 +66,7 @@ func Write(s *state.State, atom string) error { b := []byte(atom) - if err := os.WriteFile(s.OutDir+"/feed.xml", b, 0o644); err != nil { + if err := os.WriteFile(s.OutputDir+"/feed.xml", b, 0o644); err != nil { return fmt.Errorf("writing article: %w", err) } diff --git a/internal/node/node.go b/internal/node/node.go new file mode 100644 index 0000000..95a9172 --- /dev/null +++ b/internal/node/node.go @@ -0,0 +1,17 @@ +package node + +import ( + "github.com/mstcl/pher/v3/internal/metadata" + "github.com/mstcl/pher/v3/internal/nodepathlink" +) + +// Node is an abstracted idea of a source markdown file. It is a file +// represented in our state. +type Node struct { + Href string + Backlinks []nodepathlink.NodePathLink + Relatedlinks []nodepathlink.NodePathLink + Body []byte + ChromaCSS []byte + Metadata metadata.Metadata +} diff --git a/internal/nodegroup/nodegroup.go b/internal/nodegroup/nodegroup.go new file mode 100644 index 0000000..53398ab --- /dev/null +++ b/internal/nodegroup/nodegroup.go @@ -0,0 +1,6 @@ +package nodegroup + +// NOTE: a nodegroup is an abstracted idea of a directory with source markdown +// files. A node's nodegroup is it's parent nodegroup. + +type Nodegroup string diff --git a/internal/nodepath/nodepath.go b/internal/nodepath/nodepath.go new file mode 100644 index 0000000..8c2e536 --- /dev/null +++ b/internal/nodepath/nodepath.go @@ -0,0 +1,72 @@ +package nodepath + +import ( + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/mattn/go-zglob" +) + +// NOTE: a nodepath is the path of a nodegroup or node (note this +// non-distinction) + +type NodePath string + +func (np NodePath) String() string { + return string(np) +} + +// Base given a path /path/to/filename.ext, returns filename +func (np NodePath) Base() string { + fn := filepath.Base(np.String()) + + return strings.TrimSuffix(fn, filepath.Ext(fn)) +} + +func (np NodePath) IsNodegroup() (bool, error) { + // Stat nodepath + npStat, err := os.Stat(np.String()) + if err != nil { + return false, fmt.Errorf("os.Stat %s: %w", np, err) + } + + // Only process nodegroups + if npStat.Mode().IsRegular() { + return false, nil + } + + return true, nil +} + +func (np NodePath) HasChildren() (bool, error) { + // we want to check all nested files + files, err := zglob.Glob(filepath.Join(np.String(), "**", "*.md")) + if err != nil { + return false, err + } + + if len(files) == 0 { + return false, nil + } + + return true, nil +} + +// Href function returns the href, which is defined as follows: +// inputDir/a/b/c/file.md -> a/b/c/file +func (np NodePath) Href(inputDir string, prefixSlash bool) string { + // inDir/a/b/c/file.md -> a/b/c/file.md + rel, _ := filepath.Rel(inputDir, np.String()) + + // a/b/c/file.md -> a/b/c/file + href := strings.TrimSuffix(rel, filepath.Ext(rel)) + + // a/b/c/file -> /a/b/c/file (for web rooting) + if prefixSlash { + href = "/" + href + } + + return href +} diff --git a/internal/listing/listing.go b/internal/nodepathlink/listentry.go similarity index 77% rename from internal/listing/listing.go rename to internal/nodepathlink/listentry.go index 809232d..14397ed 100644 --- a/internal/listing/listing.go +++ b/internal/nodepathlink/listentry.go @@ -1,8 +1,9 @@ -package listing +package nodepathlink import "html/template" -// A listing (widely used: e.g. entry archive, links, etc.) +// A nodepath link is a link to another nodegroup or node +// Each nodegroup/node has its a list of nodepath links // // * Href: target link // @@ -13,7 +14,7 @@ import "html/template" // * IsDir: source is directory or not // // The rest are for Log View, similar to render.RenderData -type Listing struct { +type NodePathLink struct { Body template.HTML Href string Title string diff --git a/internal/render/render.go b/internal/render/render.go index 8578f59..fb37272 100644 --- a/internal/render/render.go +++ b/internal/render/render.go @@ -10,11 +10,12 @@ import ( "os" "path/filepath" - "github.com/mstcl/pher/v2/internal/config" - "github.com/mstcl/pher/v2/internal/convert" - "github.com/mstcl/pher/v2/internal/listing" - "github.com/mstcl/pher/v2/internal/state" - "github.com/mstcl/pher/v2/internal/tag" + "github.com/mstcl/pher/v3/internal/config" + "github.com/mstcl/pher/v3/internal/convert" + "github.com/mstcl/pher/v3/internal/nodepath" + "github.com/mstcl/pher/v3/internal/nodepathlink" + "github.com/mstcl/pher/v3/internal/state" + "github.com/mstcl/pher/v3/internal/tag" "golang.org/x/sync/errgroup" ) @@ -46,7 +47,7 @@ type data struct { Tags []string TagsListing []tag.Tag Footer []config.FooterLink - Backlinks, Relatedlinks, Crumbs, Listing []listing.Listing + Backlinks, Relatedlinks, Crumbs, Listing []nodepathlink.NodePathLink TOC bool ShowHeader bool } @@ -87,38 +88,36 @@ func Render(ctx context.Context, s *state.State, logger *slog.Logger) error { eg, _ := errgroup.WithContext(ctx) - for _, f := range s.Files { - f := f - - child := logger.With(slog.String("filepath", f), slog.String("context", "templating")) + for _, np := range s.NodePaths { + child := logger.With(slog.Any("nodepath", np), slog.String("context", "templating")) child.Debug("submitting goroutine") eg.Go(func() error { // Don't render drafts or skipped files - entry := s.Entries[f] - if entry.Metadata.Draft || s.Skip[f] { + entry := s.NodeMap[np] + if entry.Metadata.Draft || s.SkippedNodePathMap[np] { return nil } // Get navigation crumbs - crumbsTitle, crumbsLink := convert.NavCrumbs(f, s.InDir, s.Config.IsExt) + crumbsTitle, crumbsLink := convert.NavCrumbs(np, s.InputDir, s.Config.IsExt) // Populate navigation crumbs - crumbs := []listing.Listing{} + crumbs := []nodepathlink.NodePathLink{} for i, t := range crumbsTitle { - crumbs = append(crumbs, listing.Listing{Href: crumbsLink[i], Title: t}) + crumbs = append(crumbs, nodepathlink.NodePathLink{Href: crumbsLink[i], Title: t}) } // The output path outDir/{a/b/c/file}.html (part in curly brackets is the href) - outPath := s.OutDir + convert.Href(f, s.InDir, true) + ".html" + outPath := s.OutputDir + np.Href(s.InputDir, true) + ".html" // Construct rendering data (entryData) from config, entry data, listing, nav // crumbs, etc. entryData := data{ OutFilename: outPath, - Listing: s.Listings[f], - Filename: convert.FileBase(f), + Listing: s.NodePathLinksMap[np], + Filename: np.Base(), Description: entry.Metadata.Description, Tags: entry.Metadata.Tags, TOC: entry.Metadata.TOC, @@ -152,14 +151,16 @@ func Render(ctx context.Context, s *state.State, logger *slog.Logger) error { } // Use data updated only if given - entryData.DateUpdated, entryData.MachineDateUpdated, err = convert.Date(entry.Metadata.DateUpdated) + entryData.DateUpdated, entryData.MachineDateUpdated, err = convert.Date( + entry.Metadata.DateUpdated, + ) if err != nil { return err } // Add tags only to root index - if f == s.InDir+"/index.md" { - entryData.TagsListing = s.Tags + if np == nodepath.NodePath(filepath.Join(s.InputDir, "index.md")) { + entryData.TagsListing = s.NodeTags } // Render @@ -190,8 +191,8 @@ func Render(ctx context.Context, s *state.State, logger *slog.Logger) error { data: &data{ RootCrumb: s.Config.RootCrumb, Footer: s.Config.Footer, - TagsListing: s.Tags, - OutFilename: s.OutDir + "/tags.html", + TagsListing: s.NodeTags, + OutFilename: s.OutputDir + "/tags.html", Path: s.Config.Path, }, }); err != nil { diff --git a/internal/source/parse.go b/internal/source/parse.go index 7882d89..8286181 100644 --- a/internal/source/parse.go +++ b/internal/source/parse.go @@ -6,11 +6,11 @@ import ( "fmt" chromahtml "github.com/alecthomas/chroma/v2/formatters/html" - "github.com/mstcl/pher/v2/internal/customanchor" - "github.com/mstcl/pher/v2/internal/frontmatter" - "github.com/mstcl/pher/v2/internal/metadata" - "github.com/mstcl/pher/v2/internal/toc" - "github.com/mstcl/pher/v2/internal/wikilink" + "github.com/mstcl/pher/v3/internal/customanchor" + "github.com/mstcl/pher/v3/internal/frontmatter" + "github.com/mstcl/pher/v3/internal/metadata" + "github.com/mstcl/pher/v3/internal/toc" + "github.com/mstcl/pher/v3/internal/wikilink" "github.com/yuin/goldmark" highlighting "github.com/yuin/goldmark-highlighting/v2" "github.com/yuin/goldmark/ast" diff --git a/internal/state/state.go b/internal/state/state.go index 6b54db3..03b6631 100644 --- a/internal/state/state.go +++ b/internal/state/state.go @@ -3,33 +3,44 @@ package state import ( "html/template" - "github.com/mstcl/pher/v2/internal/config" - "github.com/mstcl/pher/v2/internal/entry" - "github.com/mstcl/pher/v2/internal/listing" - "github.com/mstcl/pher/v2/internal/tag" + "github.com/mstcl/pher/v3/internal/assetpath" + "github.com/mstcl/pher/v3/internal/config" + "github.com/mstcl/pher/v3/internal/node" + "github.com/mstcl/pher/v3/internal/nodepath" + "github.com/mstcl/pher/v3/internal/nodepathlink" + "github.com/mstcl/pher/v3/internal/tag" ) +// State [TODO] +// +// * SkippedNodePathMap: map of NodePaths that shouldn't be rendered because its +// nodegroup is of Log listing type. +// +// * NodegroupWithoutIndexMap: map of Nodegroups that don't have an index file type State struct { - Config *config.Config - Templates *template.Template - Entries map[string]entry.Entry - Assets map[string]bool - Skip map[string]bool - Missing map[string]bool - Listings map[string][]listing.Listing - InDir string - OutDir string - Files []string - Tags []tag.Tag - DryRun bool + Config *config.Config + Templates *template.Template + NodeMap map[nodepath.NodePath]node.Node + UserAssetMap map[assetpath.AssetPath]bool + SkippedNodePathMap map[nodepath.NodePath]bool + NodegroupWithoutIndexMap map[nodepath.NodePath]bool + NodePathLinksMap map[nodepath.NodePath][]nodepathlink.NodePathLink + InputDir string + OutputDir string + ConfigFile string + NodePaths []nodepath.NodePath + NodeTags []tag.Tag + ShowVersion bool + Debug bool + DryRun bool } func Init() State { return State{ - Entries: make(map[string]entry.Entry), - Assets: make(map[string]bool), - Listings: make(map[string][]listing.Listing), - Skip: make(map[string]bool), - Tags: []tag.Tag{}, + NodeMap: make(map[nodepath.NodePath]node.Node), + UserAssetMap: make(map[assetpath.AssetPath]bool), + NodePathLinksMap: make(map[nodepath.NodePath][]nodepathlink.NodePathLink), + SkippedNodePathMap: make(map[nodepath.NodePath]bool), + NodeTags: []tag.Tag{}, } } diff --git a/internal/tag/tag.go b/internal/tag/tag.go index b7ef0c9..7491890 100644 --- a/internal/tag/tag.go +++ b/internal/tag/tag.go @@ -1,6 +1,6 @@ package tag -import "github.com/mstcl/pher/v2/internal/listing" +import "github.com/mstcl/pher/v3/internal/nodepathlink" // A tag struct for extract.extractTags and render.RenderTags. Not to be // conceptually confused with parse.Metadata.Tags !!! @@ -12,6 +12,6 @@ import "github.com/mstcl/pher/v2/internal/listing" // * Links: entries (represtend as listing.Listing) for a given tag type Tag struct { Name string - Links []listing.Listing + Links []nodepathlink.NodePathLink Count int } diff --git a/main.go b/main.go index f871947..3ff9d89 100644 --- a/main.go +++ b/main.go @@ -20,10 +20,10 @@ import ( "time" "github.com/lmittmann/tint" - "github.com/mstcl/pher/v2/internal/cli" + "github.com/mstcl/pher/v3/internal/cli" ) -//go:embed web/template/* +//go:embed web/template/* web/static/* var fs embed.FS func main() { @@ -31,9 +31,9 @@ func main() { TimeFormat: time.Kitchen, })) - cli.Templates = fs + cli.EmbedFS = fs - if err := cli.Parse(); err != nil { + if err := cli.Handler(); err != nil { logger.Error(fmt.Sprintf("%v", err)) os.Exit(1) diff --git a/web/static/style.css b/web/static/style.css new file mode 100644 index 0000000..407686b --- /dev/null +++ b/web/static/style.css @@ -0,0 +1,919 @@ +:root { + --background: #fff; + --background-2: #fafafa; + --foreground: #222; + --bold: #000; + --secondary: #f2f2f2; + --tertiary: #e0e0e0; + --quaternary: #777; +} + +.chroma { + background-color: var(--background-2); +} + +html { + line-height: 1.75; + font-size: 14px; + box-sizing: border-box; +} + +a, +body { + color: var(--foreground); +} + +body, +html { + -webkit-text-size-adjust: 100%; +} + +body { + font-family: + -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu, + Cantarell, "Helvetica Neue", sans-serif; + background-color: var(--background); + font-size: 1rem; + font-weight: 400; + padding: 1em; + margin: auto; + max-width: 42rem; + line-height: 1.85; + display: flex; + flex-direction: column; + padding: 1rem; + gap: 2.5rem; +} + +details, +hr, +main { + display: block; +} + +h1 { + margin: 0.67em0; +} + +hr { + box-sizing: content-box; + height: 2px; + border: 0; + border-top: 1px solid var(--secondary); + border-bottom: 1px solid var(--secondary); + margin: 1em 0; +} + +a { + background-color: #fff0; + text-decoration: none; +} + +abbr[title] { + border-bottom: none; + text-decoration: underline dotted; +} + +b, +strong { + font-weight: 700; +} + +code, +kbd, +samp { + border-radius: 0.25rem; + padding: 0.2rem 0.3rem; +} + +hr, +pre > code { + padding: 0; +} + +small { + font-size: 80%; +} + +img { + border-style: none; +} + +button, +input, +optgroup, +select, +textarea { + font-family: inherit; + font-size: 100%; + line-height: 1.15; + margin: 0; +} + +button, +hr, +input { + overflow: visible; +} + +button, +select { + text-transform: none; +} + +[type="button"], +[type="reset"], +[type="submit"], +button { + -webkit-appearance: button; +} + +[type="button"]::-moz-focus-inner, +[type="reset"]::-moz-focus-inner, +[type="submit"]::-moz-focus-inner, +button::-moz-focus-inner { + border-style: none; + padding: 0; +} + +[type="button"]:-moz-focusring, +[type="reset"]:-moz-focusring, +[type="submit"]:-moz-focusring, +button:-moz-focusring { + outline: 1px dotted ButtonText; +} + +fieldset { + padding: 0.35em 0.75em 0.625em; +} + +legend { + color: inherit; + display: table; + max-width: 100%; + white-space: normal; +} + +progress { + vertical-align: baseline; +} + +textarea { + overflow: auto; +} + +[type="checkbox"], +[type="radio"], +legend { + box-sizing: border-box; + padding: 0; +} + +[type="number"]::-webkit-inner-spin-button, +[type="number"]::-webkit-outer-spin-button { + height: auto; +} + +[type="search"] { + -webkit-appearance: textfield; + outline-offset: -2px; +} + +[type="search"]::-webkit-search-decoration { + -webkit-appearance: none; +} + +::-webkit-file-upload-button { + -webkit-appearance: button; + font: inherit; +} + +summary { + display: list-item; +} + +.footnotes > hr, +[hidden], +template { + display: none; +} + +*, +:after, +:before { + box-sizing: inherit; +} + +h1, +h2 { + color: var(--foreground); +} + +h1, +h2, +h3, +h4, +h5, +h6 { + margin-top: 2.5rem; + letter-spacing: -0.025em; + text-transform: uppercase; + line-height: 1.5; +} + +h1 { + font-size: 1.2rem; + font-weight: 800; +} + +h2, +h3, +h4, +h5, +h6 { + font-size: 0.894rem; +} + +h3, +h4, +h5, +h6 { + color: var(--quaternary); +} + +h2, +h4, +h5, +h6 { + font-weight: bold; +} + +p { + margin: 1.2rem 0; +} + +.links ul li a:visited, +a:visited { + color: var(--foreground); +} + +a:active { + color: var(--foreground); + background: var(--tertiary); +} + +ol, +ul { + margin: 1rem 0; + padding: 0 0 0 0 2rem; +} + +ol ol, +ol ul, +ul ol, +ul ul { + margin: 0.3rem 0; +} + +li p:last-child { + margin-bottom: 0; +} + +blockquote { + margin: 0; + padding-left: 1em; + font-weight: 500; + border-left: 0.25em var(--secondary) solid; +} + +code, +kbd, +pre, +samp { + font-family: + ui-monospace, SFMono-Regular, Consolas, "Liberation Mono", Menlo, monospace; + word-wrap: break-word; + color: var(--foreground); + background: var(--background-2); +} + +pre { + white-space: pre; + border-radius: 0.125rem; + border: 1px solid var(--secondary); + line-height: 1.5; + font-size: 13px; + padding: 0.75rem; + border-width: 1px; + overflow-x: auto; +} + +code, +kbd, +samp { + font-size: 0.98em; +} + +blockquote, +dfn { + font-style: italic; +} + +ins { + background: #ff9; + color: #000; + text-decoration: none; +} + +sub, +sup { + font-size: 75%; + line-height: 0; + position: relative; + vertical-align: baseline; +} + +sup { + top: -0.5em; +} + +sub { + bottom: -0.25em; +} + +article a, +article a:after, +article a:visited { + color: var(--bold); + font-weight: bold; +} + +article a::after { + content: " ↗"; + font-size: 0.75rem; + vertical-align: text-bottom; +} + +.footnote-backref:visited, +.tags-listing span, +blockquote, +details { + color: var(--quaternary); +} + +table { + text-indent: 0; + border-color: inherit; + border-collapse: collapse; + text-align: left; +} + +tbody tr, +thead { + border-bottom: 1px solid var(--tertiary); +} + +thead th { + font-weight: 600; + vertical-align: bottom; + padding-right: 0.571em; + padding-bottom: 0.571em; + padding-left: 0.571em; +} + +tbody td, +tfoot td { + vertical-align: baseline; + padding: 0.571em; +} + +nav > :not(:last-child)::after { + content: "/"; +} + +nav { + text-transform: lowercase; + font-size: 0.765rem; + font-weight: 500; + line-height: 1.8; + margin-top: 1.5rem; +} + +aside a, +aside a:visited, +footer a, +footer a:visited, +header a, +header a:visited { + color: var(--quaternary); + text-decoration: none; +} + +main > header { + flex-direction: column; + gap: 0.5rem; + display: flex; +} + +article header { + margin-bottom: 3rem; +} + +article header:only-child { + margin: 0; +} + +article :where(article :last-child):is(p) { + margin-bottom: 0; +} + +article li::marker { + color: var(--quaternary); +} + +:not(pre) > code { + border: 1px solid var(--secondary); +} + +dd, +dl, +dt { + margin: 0; +} + +dt { + font-weight: 600; +} + +header footer { + margin-top: auto; + align-items: baseline; + font-size: 0.765rem; + line-height: 1rem; + display: flex; +} + +footer { + font-size: 0.9rem; + line-height: 1.8rem; + margin-bottom: 1.75rem; +} + +.h-anchor { + text-decoration-line: none !important; + justify-content: center; + align-items: center; + font-size: inherit; +} + +.footnotes { + font-size: 0.85rem; + color: var(--quaternary); + margin-top: 1.5rem; + vertical-align: middle; +} + +.footnotes::before { + content: ""; + display: block; + width: 5rem; + height: 1px; + background-color: var(--secondary); + margin: auto; +} + +.article-meta a::after, +.footnote-ref::after, +.footnotes a::after, +.section-heading::after, +.wikilink::after, +article h2::after, +article h3::after, +article h4::after, +article h5::after, +article h6::after { + content: ""; +} + +.footnote-backref, +.footnote-ref { + color: var(--quaternary); + text-decoration: none; +} + +.article-header { + font-family: + ui-monospace, SFMono-Regular, Consolas, "Liberation Mono", Menlo, monospace; + font-size: 0.75rem; + flex-direction: column; + gap: 0.5rem; + display: flex; + color: var(--quaternary); +} + +.article-title { + margin-top: 0; + margin-bottom: 0.25rem; + font-weight: bold; + font-family: + ui-monospace, SFMono-Regular, Consolas, "Liberation Mono", Menlo, monospace; +} + +.article-description { + color: var(--quaternary); + margin: 0; + font-size: 0.894rem; + line-height: 1.75; + font-weight: 600; + text-transform: lowercase; + font-family: + ui-monospace, SFMono-Regular, Consolas, "Liberation Mono", Menlo, monospace; +} + +.article-meta { + align-items: baseline; + font-size: 0.765rem; + display: flex; +} + +.article-meta a { + font-weight: normal; +} + +.article-tags { + font-family: + ui-monospace, SFMono-Regular, Consolas, "Liberation Mono", Menlo, monospace; + padding: 0; + margin: 0; + list-style: none; +} + +.article-tags li { + padding: 0; + margin: 0; + display: inline; +} + +.article-meta > :not(:last-child)::after, +footer ul li:not(:last-child)::after { + content: "·"; + color: var(--tertiary); + margin-left: 0.375rem; + margin-right: 0.375rem; +} + +.links { + flex-direction: column; + gap: 3rem; + display: flex; +} + +.section-heading { + color: var(--quaternary); + text-transform: uppercase; + font-weight: 600; + letter-spacing: 0.025em; + margin-bottom: 0.75rem; + font-size: 0.75rem; + line-height: 1rem; + margin-top: 0; +} + +.section-heading::after, +article h2::after, +article h3::after, +article h4::after, +article h5::after, +article h6::after { + background-color: var(--secondary); + flex: 1 1 0%; + height: 1px; +} + +.section-heading, +article h2, +article h3, +article h4, +article h5, +article h6 { + display: flex; + gap: 0.5rem; + align-items: center; + font-family: + ui-monospace, SFMono-Regular, Consolas, "Liberation Mono", Menlo, monospace; +} + +.footer-links { + display: flex; + align-items: baseline; + list-style: none; + margin: 0; + padding: 0; + flex-direction: row; + flex-wrap: wrap; + font-family: + ui-monospace, SFMono-Regular, Consolas, "Liberation Mono", Menlo, monospace; +} + +.tag:target > h6 { + text-decoration: underline; + text-decoration-thickness: 2px; + text-decoration-color: var(--tertiary); +} + +.index-grid a, +.links ul { + display: flex; + flex-direction: column; +} + +.links ul { + align-items: flex-start; + margin: 0; + padding: 0; + margin-left: calc(0.5rem * -1); + margin-right: calc(0.5rem * -1); + list-style: none; + line-height: 1.5; +} + +.links li { + max-width: 100%; +} + +.links ul li a { + color: var(--foreground); + gap: 0.375rem; + display: flex; + flex-direction: row; + padding: 0.25rem 0.5rem; +} + +.links-info { + white-space: nowrap; + overflow: hidden; + flex: 1 1 0%; + display: flex; +} + +.links-info-grid:hover, +.links-info:hover, +.tags-listing a:hover { + background-color: var(--secondary); +} + +.links ul li a, +.links-info, +.links-info-grid { + align-items: baseline; + background-color: #fff0; + border-radius: 0.25rem; +} + +.links-description, +.links-title { + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; +} + +.links-title { + font-weight: 500; + flex-shrink: 0; + color: var(--foreground); +} + +.links-description { + font-weight: 400; + color: var(--quaternary); +} + +.index-grid { + margin: 0 !important; + padding: 0 !important; + margin-left: calc(0.5rem * -1) !important; + margin-right: calc(0.5rem * -1) !important; + gap: 0.375rem !important; + display: grid !important; + line-height: unset !important; +} + +.index-grid a { + gap: 0.25rem; + padding: 0.5rem; +} + +@media (min-width: 640px) { + .index-grid { + grid-template-columns: repeat(3, minmax(0, 1fr)); + } +} + +.links-title-grid { + display: block; + font-weight: 500; + flex-shrink: 0; + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; + color: var(--foreground); +} + +.links-description-grid { + font-weight: 400; + color: var(--quaternary); + font-size: 0.875rem; +} + +.tags-listing { + display: flex !important; + margin: 0 !important; + padding: 0 !important; + margin-left: calc(0.5rem * -1) !important; + margin-right: calc(0.5rem * -1) !important; + flex-wrap: wrap !important; + list-style: none !important; + flex-direction: row !important; +} + +.tags-listing a { + display: block !important; +} + +.h-anchor::after, +.toc li a::after { + content: ""; +} + +:where(h1, h2) > .h-anchor { + color: var(--foreground); +} + +:where(h3, h4, h5, h6) > .h-anchor { + color: var(--quaternary); +} + +:where(h1, h2, h3, h4, h5, h6):hover > .h-anchor { + opacity: 1; +} + +.toc, +.toc li { + color: var(--quaternary); +} + +.toc { + text-transform: uppercase; + border-radius: 0.25rem; + border: 1px solid var(--secondary); + list-style: none; + margin-left: 1rem; + font-weight: 400; + font-size: 0.75rem; + line-height: 1.5; + background-color: var(--background-2); + padding: 0.5rem 0.75rem; + margin-top: 0; + margin-bottom: 0; + float: right; + display: none; + font-family: + ui-monospace, SFMono-Regular, Consolas, "Liberation Mono", Menlo, monospace; +} + +.toc li { + font-weight: 500; + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; +} + +.toc li:first-child { + margin-top: 0.25rem; +} + +.toc li a { + font-size: 0.765rem; + font-weight: 600; + text-decoration: none; + color: var(--quaternary); +} + +.content-wrapper { + display: flex; + flex-direction: column; + gap: 3rem; +} + +@media (min-width: 1024px) { + .toc { + width: 30%; + } +} + +@media (min-width: 640px) { + .toc { + display: block; + } +} + +@media screen and (min-width: 768px) { + body { + max-width: 90%; + } + + .content-wrapper { + flex-direction: row; + gap: 3.5rem; + align-items: flex-start; + } + + main { + flex: 1 1 0%; + max-width: 55rem; + } + + .links { + width: 32%; + flex-shrink: 0; + border-left: 1px solid var(--secondary); + padding-left: 2.5rem; + } + + .tags-page { + border: none; + padding-left: 0rem; + } + + .h-anchor { + opacity: 0; + display: flex; + margin-left: -1.5rem; + transition-property: opacity; + transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1); + border-radius: 0.125rem; + transition-duration: 150ms; + position: absolute; + } +} +@media print { + * { + background: transparent !important; + color: black !important; + filter: none !important; + -ms-filter: none !important; + } + body { + font-size: 12pt; + max-width: 100%; + } + a, + a:visited { + text-decoration: underline; + } + hr { + height: 1px; + border: 0; + border-bottom: 1px solid #000; + } + a[href]:after { + content: " (" attr(href) ")"; + } + abbr[title]:after { + content: " (" attr(title) ")"; + } + .ir a:after, + a[href^="javascript:"]:after, + a[href^="#"]:after { + content: ""; + } + pre, + blockquote { + border: 1px solid var(--tertiary); + padding-right: 1em; + page-break-inside: avoid; + } + tr, + img { + page-break-inside: avoid; + } + img { + max-width: 100% !important; + } + @page :left { + margin: 15mm 20mm 15mm 10mm; + } + @page :right { + margin: 15mm 10mm 15mm 20mm; + } + p, + h2, + h3 { + orphans: 3; + widows: 3; + } + h2, + h3 { + page-break-after: avoid; + } +} diff --git a/web/template/aside.tmpl b/web/template/aside.tmpl index 90c09c1..290c9cc 100644 --- a/web/template/aside.tmpl +++ b/web/template/aside.tmpl @@ -43,7 +43,7 @@ {{- range .Backlinks}}