Mirror of https://github.com/gohugoio/hugo.git

commit a82efe5bb1

Merge remote-tracking branch 'origin/parser' into mrg_praser

Also brought in the parser from github.com/noahcampbell/akebia.

Conflicts:
    hugolib/page.go
    hugolib/page_test.go

7 changed files with 832 additions and 141 deletions

hugolib/page.go (154 changed lines)
@@ -14,7 +14,6 @@
package hugolib

import (
    "bufio"
    "bytes"
    "encoding/json"
    "errors"
@@ -23,6 +22,7 @@ import (
    helper "github.com/spf13/hugo/template"
    "github.com/spf13/hugo/template/bundle"
    "github.com/theplant/blackfriday"
    "github.com/spf13/hugo/parser"
    "html/template"
    "io"
    "launchpad.net/goyaml"
@@ -30,7 +30,6 @@ import (
    "sort"
    "strings"
    "time"
    "unicode"
)

type Page struct {
@@ -188,32 +187,6 @@ func (p *Page) analyzePage() {
    p.FuzzyWordCount = int((p.WordCount+100)/100) * 100
}

func splitPageContent(data []byte, start string, end string) ([]string, []string) {
    lines := strings.Split(string(data), "\n")
    datum := lines[0:]

    var found = 0
    if start != end {
        for i, line := range lines {

            if strings.HasPrefix(line, start) {
                found += 1
            }

            if strings.HasPrefix(line, end) {
                found -= 1
            }

            if found == 0 {
                datum = lines[0 : i+1]
                lines = lines[i+1:]
                break
            }
        }
    }
    return datum, lines
}

func (p *Page) Permalink() template.HTML {
    baseUrl := string(p.Site.BaseUrl)
    section := strings.TrimSpace(p.Section)
@@ -243,12 +216,17 @@ func (p *Page) Permalink() template.HTML {

func (page *Page) handleTomlMetaData(datum []byte) (interface{}, error) {
    m := map[string]interface{}{}
    datum = removeTomlIdentifier(datum)
    if _, err := toml.Decode(string(datum), &m); err != nil {
        return m, fmt.Errorf("Invalid TOML in %s \nError parsing page meta data: %s", page.FileName, err)
    }
    return m, nil
}

func removeTomlIdentifier(datum []byte) []byte {
    return bytes.Replace(datum, []byte("+++"), []byte(""), -1)
}

func (page *Page) handleYamlMetaData(datum []byte) (interface{}, error) {
    m := map[string]interface{}{}
    if err := goyaml.Unmarshal(datum, &m); err != nil {
@@ -339,73 +317,6 @@ func (page *Page) GetParam(key string) interface{} {
    return nil
}

var ErrDetectingFrontMatter = errors.New("unable to detect front matter")
var ErrMatchingStartingFrontMatterDelimiter = errors.New("unable to match beginning front matter delimiter")
var ErrMatchingEndingFrontMatterDelimiter = errors.New("unable to match ending front matter delimiter")

func (page *Page) parseFrontMatter(data *bufio.Reader) (err error) {

    if err = checkEmpty(data); err != nil {
        return fmt.Errorf("%s: %s", page.FileName, err)
    }

    var mark rune
    if mark, err = chompWhitespace(data); err != nil {
        return
    }

    f := page.detectFrontMatter(mark)
    if f == nil {
        return ErrDetectingFrontMatter
    }

    if found, err := beginFrontMatter(data, f); err != nil || !found {
        return ErrMatchingStartingFrontMatterDelimiter
    }

    var frontmatter = new(bytes.Buffer)
    for {
        line, _, err := data.ReadLine()
        if err != nil {
            if err == io.EOF {
                return ErrMatchingEndingFrontMatterDelimiter
            }
            return err
        }

        if bytes.Equal(line, f.markend) {
            if f.includeMark {
                frontmatter.Write(line)
            }
            break
        }

        frontmatter.Write(line)
        frontmatter.Write([]byte{'\n'})
    }

    metadata, err := f.parse(frontmatter.Bytes())
    if err != nil {
        return
    }

    if err = page.update(metadata); err != nil {
        return
    }

    return
}

func checkEmpty(data *bufio.Reader) (err error) {
    if _, _, err = data.ReadRune(); err != nil {
        return errors.New("unable to locate front matter")
    }
    if err = data.UnreadRune(); err != nil {
        return errors.New("unable to unread first charactor in page buffer.")
    }
    return
}

type frontmatterType struct {
    markstart, markend []byte
    parse func([]byte) (interface{}, error)
@@ -425,37 +336,6 @@ func (page *Page) detectFrontMatter(mark rune) (f *frontmatterType) {
    }
}

func beginFrontMatter(data *bufio.Reader, f *frontmatterType) (bool, error) {
    var err error
    var peek []byte
    if f.includeMark {
        peek, err = data.Peek(len(f.markstart))
    } else {
        peek = make([]byte, len(f.markstart))
        _, err = data.Read(peek)
    }
    if err != nil {
        return false, err
    }
    return bytes.Equal(peek, f.markstart), nil
}

func chompWhitespace(data *bufio.Reader) (r rune, err error) {
    for {
        r, _, err = data.ReadRune()
        if err != nil {
            return
        }
        if unicode.IsSpace(r) {
            continue
        }
        if err := data.UnreadRune(); err != nil {
            return r, errors.New("unable to unread first charactor in front matter.")
        }
        return r, nil
    }
}

func (p *Page) Render(layout ...string) template.HTML {
    curLayout := ""

@@ -474,18 +354,30 @@ func (p *Page) ExecuteTemplate(layout string) *bytes.Buffer {
}

func (page *Page) parse(reader io.Reader) error {
    data := bufio.NewReader(reader)

    err := page.parseFrontMatter(data)
    p, err := parser.ReadFrom(reader)
    if err != nil {
        return err
    }

    front := p.FrontMatter()
    if len(front) == 0 {
        return errors.New("Unable to locate frontmatter")
    }
    fm := page.detectFrontMatter(rune(front[0]))
    meta, err := fm.parse(front)
    if err != nil {
        return err
    }

    if err = page.update(meta); err != nil {
        return err
    }

    switch page.Markup {
    case "md":
        page.convertMarkdown(data)
        page.convertMarkdown(bytes.NewReader(p.Content()))
    case "rst":
        page.convertRestructuredText(data)
        page.convertRestructuredText(bytes.NewReader(p.Content()))
    }
    return nil
}
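The hunks above replace hugolib's own front-matter scanner (parseFrontMatter, checkEmpty, beginFrontMatter, chompWhitespace) with a call into the new parser package: Page.parse now hands the raw reader to parser.ReadFrom and works with the returned Page value. Below is a minimal sketch of consuming that API directly; the import path and the ReadFrom, FrontMatter and Content signatures come from this commit, while the sample document and the error handling are only illustrative.

package main

import (
    "fmt"
    "strings"

    "github.com/spf13/hugo/parser" // the package added in this commit
)

func main() {
    // A document with YAML front matter, the same shape used by the tests in this commit.
    doc := "---\ntitle: example\n---\nbody text\n"

    p, err := parser.ReadFrom(strings.NewReader(doc))
    if err != nil {
        panic(err) // illustrative; hugolib returns the error instead
    }

    // The front matter keeps its delimiters; hugolib inspects front[0]
    // to pick the matching metadata handler (YAML, TOML, or JSON).
    fmt.Printf("front matter: %q\n", p.FrontMatter())
    // Everything after the closing delimiter is the body that gets passed
    // to convertMarkdown / convertRestructuredText.
    fmt.Printf("content: %q\n", p.Content())
}

With this split, the old Err* values and the bufio-based helpers in page.go become dead code, which is why the hunks above delete them.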
@@ -44,7 +44,7 @@ func TestParseIndexes(t *testing.T) {
    } {
        p, err := ReadFrom(strings.NewReader(test), "page/with/index")
        if err != nil {
            t.Fatalf("Failed parsing page: %s", err)
            t.Fatalf("Failed parsing %q: %s", test, err)
        }

        param := p.GetParam("tags")
@@ -133,7 +133,7 @@ func TestDegenerateEmptyPage(t *testing.T) {
        t.Fatalf("Expected ReadFrom to return an error when an empty buffer is passed.")
    }

    checkError(t, err, "test: unable to locate front matter")
    checkError(t, err, "EOF")
}

func checkPageTitle(t *testing.T, page *Page, title string) {
@@ -242,9 +242,9 @@ func TestDegenerateInvalidFrontMatterShortDelim(t *testing.T) {
        r string
        err string
    }{
        {INVALID_FRONT_MATTER_SHORT_DELIM, "unable to match beginning front matter delimiter"},
        {INVALID_FRONT_MATTER_SHORT_DELIM_ENDING, "unable to match ending front matter delimiter"},
        {INVALID_FRONT_MATTER_MISSING, "unable to detect front matter"},
        {INVALID_FRONT_MATTER_SHORT_DELIM, "Unable to locate frontmatter"},
        {INVALID_FRONT_MATTER_SHORT_DELIM_ENDING, "EOF"},
        {INVALID_FRONT_MATTER_MISSING, "Unable to locate frontmatter"},
    }
    for _, test := range tests {
        _, err := ReadFrom(strings.NewReader(test.r), "invalid/front/matter/short/delim")
@@ -8,10 +8,9 @@ import (
    "testing"
)

const SLUG_DOC_1 = "---\ntitle: slug doc 1\nslug: slug-doc-1\naliases:\n - sd1/foo/\n - sd2\n - sd3/\n - sd4.html\n---\nslug doc 1 content"
const SLUG_DOC_1 = "---\ntitle: slug doc 1\nslug: slug-doc-1\naliases:\n - sd1/foo/\n - sd2\n - sd3/\n - sd4.html\n---\nslug doc 1 content\n"

//const SLUG_DOC_1 = "---\ntitle: slug doc 1\nslug: slug-doc-1\n---\nslug doc 1 content"
const SLUG_DOC_2 = "---\ntitle: slug doc 2\nslug: slug-doc-2\n---\nslug doc 2 content"
const SLUG_DOC_2 = "---\ntitle: slug doc 2\nslug: slug-doc-2\n---\nslug doc 2 content\n"

const INDEX_TEMPLATE = "{{ range .Data.Pages }}.{{ end }}"

@@ -59,7 +58,7 @@ func (t *InMemoryAliasTarget) Publish(label string, permalink template.HTML) (er

var urlFakeSource = []byteSource{
    {"content/blue/doc1.md", []byte(SLUG_DOC_1)},
    {"content/blue/doc2.md", []byte(SLUG_DOC_2)},
    // {"content/blue/doc2.md", []byte(SLUG_DOC_2)},
}

func TestPageCount(t *testing.T) {
@@ -96,7 +95,7 @@ func TestPageCount(t *testing.T) {
        t.Errorf("No indexed rendered. %v", target.files)
    }

    expected := "<html><head></head><body>..</body></html>"
    expected := "<html><head></head><body>.</body></html>"
    if string(blueIndex) != expected {
        t.Errorf("Index template does not match expected: %q, got: %q", expected, string(blueIndex))
    }
parser/long_text_test.md (new file, 263 lines)

@@ -0,0 +1,263 @@
---
title: The Git Book - Long Text
---
# Getting Started #

This chapter will be about getting started with Git. We will begin at the beginning by explaining some background on version control tools, then move on to how to get Git running on your system and finally how to get it setup to start working with. At the end of this chapter you should understand why Git is around, why you should use it and you should be all setup to do so.

## About Version Control ##

What is version control, and why should you care? Version control is a system that records changes to a file or set of files over time so that you can recall specific versions later. Even though the examples in this book show software source code as the files under version control, in reality any type of file on a computer can be placed under version control.

If you are a graphic or web designer and want to keep every version of an image or layout (which you certainly would), it is very wise to use a Version Control System (VCS). A VCS allows you to: revert files back to a previous state, revert the entire project back to a previous state, review changes made over time, see who last modified something that might be causing a problem, who introduced an issue and when, and more. Using a VCS also means that if you screw things up or lose files, you can generally recover easily. In addition, you get all this for very little overhead.

### Local Version Control Systems ###

Many people’s version-control method of choice is to copy files into another directory (perhaps a time-stamped directory, if they’re clever). This approach is very common because it is so simple, but it is also incredibly error prone. It is easy to forget which directory you’re in and accidentally write to the wrong file or copy over files you don’t mean to.

To deal with this issue, programmers long ago developed local VCSs that had a simple database that kept all the changes to files under revision control (see Figure 1-1).

Insert 18333fig0101.png
Figure 1-1. Local version control diagram.

One of the more popular VCS tools was a system called rcs, which is still distributed with many computers today. Even the popular Mac OS X operating system includes the rcs command when you install the Developer Tools. This tool basically works by keeping patch sets (that is, the differences between files) from one revision to another in a special format on disk; it can then recreate what any file looked like at any point in time by adding up all the patches.

### Centralized Version Control Systems ###

The next major issue that people encounter is that they need to collaborate with developers on other systems. To deal with this problem, Centralized Version Control Systems (CVCSs) were developed. These systems, such as CVS, Subversion, and Perforce, have a single server that contains all the versioned files, and a number of clients that check out files from that central place. For many years, this has been the standard for version control (see Figure 1-2).

Insert 18333fig0102.png
Figure 1-2. Centralized version control diagram.

This setup offers many advantages, especially over local VCSs. For example, everyone knows to a certain degree what everyone else on the project is doing. Administrators have fine-grained control over who can do what; and it’s far easier to administer a CVCS than it is to deal with local databases on every client.

However, this setup also has some serious downsides. The most obvious is the single point of failure that the centralized server represents. If that server goes down for an hour, then during that hour nobody can collaborate at all or save versioned changes to anything they’re working on. If the hard disk the central database is on becomes corrupted, and proper backups haven’t been kept, you lose absolutely everything—the entire history of the project except whatever single snapshots people happen to have on their local machines. Local VCS systems suffer from this same problem—whenever you have the entire history of the project in a single place, you risk losing everything.

### Distributed Version Control Systems ###

This is where Distributed Version Control Systems (DVCSs) step in. In a DVCS (such as Git, Mercurial, Bazaar or Darcs), clients don’t just check out the latest snapshot of the files: they fully mirror the repository. Thus if any server dies, and these systems were collaborating via it, any of the client repositories can be copied back up to the server to restore it. Every checkout is really a full backup of all the data (see Figure 1-3).

Insert 18333fig0103.png
Figure 1-3. Distributed version control diagram.

Furthermore, many of these systems deal pretty well with having several remote repositories they can work with, so you can collaborate with different groups of people in different ways simultaneously within the same project. This allows you to set up several types of workflows that aren’t possible in centralized systems, such as hierarchical models.

## A Short History of Git ##

As with many great things in life, Git began with a bit of creative destruction and fiery controversy. The Linux kernel is an open source software project of fairly large scope. For most of the lifetime of the Linux kernel maintenance (1991–2002), changes to the software were passed around as patches and archived files. In 2002, the Linux kernel project began using a proprietary DVCS system called BitKeeper.

In 2005, the relationship between the community that developed the Linux kernel and the commercial company that developed BitKeeper broke down, and the tool’s free-of-charge status was revoked. This prompted the Linux development community (and in particular Linus Torvalds, the creator of Linux) to develop their own tool based on some of the lessons they learned while using BitKeeper. Some of the goals of the new system were as follows:

* Speed
* Simple design
* Strong support for non-linear development (thousands of parallel branches)
* Fully distributed
* Able to handle large projects like the Linux kernel efficiently (speed and data size)

Since its birth in 2005, Git has evolved and matured to be easy to use and yet retain these initial qualities. It’s incredibly fast, it’s very efficient with large projects, and it has an incredible branching system for non-linear development (See Chapter 3).

## Git Basics ##

So, what is Git in a nutshell? This is an important section to absorb, because if you understand what Git is and the fundamentals of how it works, then using Git effectively will probably be much easier for you. As you learn Git, try to clear your mind of the things you may know about other VCSs, such as Subversion and Perforce; doing so will help you avoid subtle confusion when using the tool. Git stores and thinks about information much differently than these other systems, even though the user interface is fairly similar; understanding those differences will help prevent you from becoming confused while using it.

### Snapshots, Not Differences ###

The major difference between Git and any other VCS (Subversion and friends included) is the way Git thinks about its data. Conceptually, most other systems store information as a list of file-based changes. These systems (CVS, Subversion, Perforce, Bazaar, and so on) think of the information they keep as a set of files and the changes made to each file over time, as illustrated in Figure 1-4.

Insert 18333fig0104.png
Figure 1-4. Other systems tend to store data as changes to a base version of each file.

Git doesn’t think of or store its data this way. Instead, Git thinks of its data more like a set of snapshots of a mini filesystem. Every time you commit, or save the state of your project in Git, it basically takes a picture of what all your files look like at that moment and stores a reference to that snapshot. To be efficient, if files have not changed, Git doesn’t store the file again—just a link to the previous identical file it has already stored. Git thinks about its data more like Figure 1-5.

Insert 18333fig0105.png
Figure 1-5. Git stores data as snapshots of the project over time.

This is an important distinction between Git and nearly all other VCSs. It makes Git reconsider almost every aspect of version control that most other systems copied from the previous generation. This makes Git more like a mini filesystem with some incredibly powerful tools built on top of it, rather than simply a VCS. We’ll explore some of the benefits you gain by thinking of your data this way when we cover Git branching in Chapter 3.

### Nearly Every Operation Is Local ###

Most operations in Git only need local files and resources to operate — generally no information is needed from another computer on your network. If you’re used to a CVCS where most operations have that network latency overhead, this aspect of Git will make you think that the gods of speed have blessed Git with unworldly powers. Because you have the entire history of the project right there on your local disk, most operations seem almost instantaneous.

For example, to browse the history of the project, Git doesn’t need to go out to the server to get the history and display it for you—it simply reads it directly from your local database. This means you see the project history almost instantly. If you want to see the changes introduced between the current version of a file and the file a month ago, Git can look up the file a month ago and do a local difference calculation, instead of having to either ask a remote server to do it or pull an older version of the file from the remote server to do it locally.

This also means that there is very little you can’t do if you’re offline or off VPN. If you get on an airplane or a train and want to do a little work, you can commit happily until you get to a network connection to upload. If you go home and can’t get your VPN client working properly, you can still work. In many other systems, doing so is either impossible or painful. In Perforce, for example, you can’t do much when you aren’t connected to the server; and in Subversion and CVS, you can edit files, but you can’t commit changes to your database (because your database is offline). This may not seem like a huge deal, but you may be surprised what a big difference it can make.

### Git Has Integrity ###

Everything in Git is check-summed before it is stored and is then referred to by that checksum. This means it’s impossible to change the contents of any file or directory without Git knowing about it. This functionality is built into Git at the lowest levels and is integral to its philosophy. You can’t lose information in transit or get file corruption without Git being able to detect it.

The mechanism that Git uses for this checksumming is called a SHA-1 hash. This is a 40-character string composed of hexadecimal characters (0–9 and a–f) and calculated based on the contents of a file or directory structure in Git. A SHA-1 hash looks something like this:

    24b9da6552252987aa493b52f8696cd6d3b00373

You will see these hash values all over the place in Git because it uses them so much. In fact, Git stores everything not by file name but in the Git database addressable by the hash value of its contents.

### Git Generally Only Adds Data ###

When you do actions in Git, nearly all of them only add data to the Git database. It is very difficult to get the system to do anything that is not undoable or to make it erase data in any way. As in any VCS, you can lose or mess up changes you haven’t committed yet; but after you commit a snapshot into Git, it is very difficult to lose, especially if you regularly push your database to another repository.

This makes using Git a joy because we know we can experiment without the danger of severely screwing things up. For a more in-depth look at how Git stores its data and how you can recover data that seems lost, see Chapter 9.

### The Three States ###

Now, pay attention. This is the main thing to remember about Git if you want the rest of your learning process to go smoothly. Git has three main states that your files can reside in: committed, modified, and staged. Committed means that the data is safely stored in your local database. Modified means that you have changed the file but have not committed it to your database yet. Staged means that you have marked a modified file in its current version to go into your next commit snapshot.

This leads us to the three main sections of a Git project: the Git directory, the working directory, and the staging area.

Insert 18333fig0106.png
Figure 1-6. Working directory, staging area, and git directory.

The Git directory is where Git stores the metadata and object database for your project. This is the most important part of Git, and it is what is copied when you clone a repository from another computer.

The working directory is a single checkout of one version of the project. These files are pulled out of the compressed database in the Git directory and placed on disk for you to use or modify.

The staging area is a simple file, generally contained in your Git directory, that stores information about what will go into your next commit. It’s sometimes referred to as the index, but it’s becoming standard to refer to it as the staging area.

The basic Git workflow goes something like this:

1. You modify files in your working directory.
2. You stage the files, adding snapshots of them to your staging area.
3. You do a commit, which takes the files as they are in the staging area and stores that snapshot permanently to your Git directory.

If a particular version of a file is in the git directory, it’s considered committed. If it’s modified but has been added to the staging area, it is staged. And if it was changed since it was checked out but has not been staged, it is modified. In Chapter 2, you’ll learn more about these states and how you can either take advantage of them or skip the staged part entirely.

## Installing Git ##

Let’s get into using some Git. First things first—you have to install it. You can get it a number of ways; the two major ones are to install it from source or to install an existing package for your platform.

### Installing from Source ###

If you can, it’s generally useful to install Git from source, because you’ll get the most recent version. Each version of Git tends to include useful UI enhancements, so getting the latest version is often the best route if you feel comfortable compiling software from source. It is also the case that many Linux distributions contain very old packages; so unless you’re on a very up-to-date distro or are using backports, installing from source may be the best bet.

To install Git, you need to have the following libraries that Git depends on: curl, zlib, openssl, expat, and libiconv. For example, if you’re on a system that has yum (such as Fedora) or apt-get (such as a Debian based system), you can use one of these commands to install all of the dependencies:

    $ yum install curl-devel expat-devel gettext-devel \
      openssl-devel zlib-devel

    $ apt-get install libcurl4-gnutls-dev libexpat1-dev gettext \
      libz-dev libssl-dev

When you have all the necessary dependencies, you can go ahead and grab the latest snapshot from the Git web site:

    http://git-scm.com/download

Then, compile and install:

    $ tar -zxf git-1.7.2.2.tar.gz
    $ cd git-1.7.2.2
    $ make prefix=/usr/local all
    $ sudo make prefix=/usr/local install

After this is done, you can also get Git via Git itself for updates:

    $ git clone git://git.kernel.org/pub/scm/git/git.git

### Installing on Linux ###

If you want to install Git on Linux via a binary installer, you can generally do so through the basic package-management tool that comes with your distribution. If you’re on Fedora, you can use yum:

    $ yum install git-core

Or if you’re on a Debian-based distribution like Ubuntu, try apt-get:

    $ apt-get install git

### Installing on Mac ###

There are two easy ways to install Git on a Mac. The easiest is to use the graphical Git installer, which you can download from the Google Code page (see Figure 1-7):

    http://code.google.com/p/git-osx-installer

Insert 18333fig0107.png
Figure 1-7. Git OS X installer.

The other major way is to install Git via MacPorts (`http://www.macports.org`). If you have MacPorts installed, install Git via

    $ sudo port install git-core +svn +doc +bash_completion +gitweb

You don’t have to add all the extras, but you’ll probably want to include +svn in case you ever have to use Git with Subversion repositories (see Chapter 8).

### Installing on Windows ###

Installing Git on Windows is very easy. The msysGit project has one of the easier installation procedures. Simply download the installer exe file from the GitHub page, and run it:

    http://msysgit.github.com/

After it’s installed, you have both a command-line version (including an SSH client that will come in handy later) and the standard GUI.

Note on Windows usage: you should use Git with the provided msysGit shell (Unix style), it allows to use the complex lines of command given in this book. If you need, for some reason, to use the native Windows shell / command line console, you have to use double quotes instead of simple quotes (for parameters with spaces in them) and you must quote the parameters ending with the circumflex accent (^) if they are last on the line, as it is a continuation symbol in Windows.

## First-Time Git Setup ##

Now that you have Git on your system, you’ll want to do a few things to customize your Git environment. You should have to do these things only once; they’ll stick around between upgrades. You can also change them at any time by running through the commands again.

Git comes with a tool called git config that lets you get and set configuration variables that control all aspects of how Git looks and operates. These variables can be stored in three different places:

* `/etc/gitconfig` file: Contains values for every user on the system and all their repositories. If you pass the option` --system` to `git config`, it reads and writes from this file specifically.
* `~/.gitconfig` file: Specific to your user. You can make Git read and write to this file specifically by passing the `--global` option.
* config file in the git directory (that is, `.git/config`) of whatever repository you’re currently using: Specific to that single repository. Each level overrides values in the previous level, so values in `.git/config` trump those in `/etc/gitconfig`.

On Windows systems, Git looks for the `.gitconfig` file in the `$HOME` directory (`%USERPROFILE%` in Windows’ environment), which is `C:\Documents and Settings\$USER` or `C:\Users\$USER` for most people, depending on version (`$USER` is `%USERNAME%` in Windows’ environment). It also still looks for /etc/gitconfig, although it’s relative to the MSys root, which is wherever you decide to install Git on your Windows system when you run the installer.

### Your Identity ###

The first thing you should do when you install Git is to set your user name and e-mail address. This is important because every Git commit uses this information, and it’s immutably baked into the commits you pass around:

    $ git config --global user.name "John Doe"
    $ git config --global user.email johndoe@example.com

Again, you need to do this only once if you pass the `--global` option, because then Git will always use that information for anything you do on that system. If you want to override this with a different name or e-mail address for specific projects, you can run the command without the `--global` option when you’re in that project.

### Your Editor ###

Now that your identity is set up, you can configure the default text editor that will be used when Git needs you to type in a message. By default, Git uses your system’s default editor, which is generally Vi or Vim. If you want to use a different text editor, such as Emacs, you can do the following:

    $ git config --global core.editor emacs

### Your Diff Tool ###

Another useful option you may want to configure is the default diff tool to use to resolve merge conflicts. Say you want to use vimdiff:

    $ git config --global merge.tool vimdiff

Git accepts kdiff3, tkdiff, meld, xxdiff, emerge, vimdiff, gvimdiff, ecmerge, and opendiff as valid merge tools. You can also set up a custom tool; see Chapter 7 for more information about doing that.

### Checking Your Settings ###

If you want to check your settings, you can use the `git config --list` command to list all the settings Git can find at that point:

    $ git config --list
    user.name=Scott Chacon
    user.email=schacon@gmail.com
    color.status=auto
    color.branch=auto
    color.interactive=auto
    color.diff=auto
    ...

You may see keys more than once, because Git reads the same key from different files (`/etc/gitconfig` and `~/.gitconfig`, for example). In this case, Git uses the last value for each unique key it sees.

You can also check what Git thinks a specific key’s value is by typing `git config {key}`:

    $ git config user.name
    Scott Chacon

## Getting Help ##

If you ever need help while using Git, there are three ways to get the manual page (manpage) help for any of the Git commands:

    $ git help <verb>
    $ git <verb> --help
    $ man git-<verb>

For example, you can get the manpage help for the config command by running

    $ git help config

These commands are nice because you can access them anywhere, even offline.
If the manpages and this book aren’t enough and you need in-person help, you can try the `#git` or `#github` channel on the Freenode IRC server (irc.freenode.net). These channels are regularly filled with hundreds of people who are all very knowledgeable about Git and are often willing to help.

## Summary ##

You should have a basic understanding of what Git is and how it’s different from the CVCS you may have been using. You should also now have a working version of Git on your system that’s set up with your personal identity. It’s now time to learn some Git basics.
parser/page.go (new file, 245 lines)

@@ -0,0 +1,245 @@
package parser

import (
    "bufio"
    "fmt"
    "bytes"
    "errors"
    "io"
    "unicode"
)

const (
    HTML_LEAD = "<"
    YAML_LEAD = "-"
    YAML_DELIM_UNIX = "---\n"
    YAML_DELIM_DOS = "---\r\n"
    TOML_LEAD = "+"
    TOML_DELIM_UNIX = "+++\n"
    TOML_DELIM_DOS = "+++\r\n"
    JAVA_LEAD = "{"
)

var (
    delims = [][]byte{
        []byte(YAML_DELIM_UNIX),
        []byte(YAML_DELIM_DOS),
        []byte(TOML_DELIM_UNIX),
        []byte(TOML_DELIM_DOS),
        []byte(JAVA_LEAD),
    }

    unixEnding = []byte("\n")
    dosEnding = []byte("\r\n")
)

type FrontMatter []byte
type Content []byte

type Page interface {
    FrontMatter() FrontMatter
    Content() Content
}

type page struct {
    render bool
    frontmatter FrontMatter
    content Content
}

func (p *page) Content() Content {
    return p.content
}

func (p *page) FrontMatter() FrontMatter {
    return p.frontmatter
}

// ReadFrom reads the content from an io.Reader and constructs a page.
func ReadFrom(r io.Reader) (p Page, err error) {
    reader := bufio.NewReader(r)

    if err = chompWhitespace(reader); err != nil {
        return
    }

    firstLine, err := peekLine(reader)
    if err != nil {
        return
    }

    newp := new(page)
    newp.render = shouldRender(firstLine)

    if newp.render && isFrontMatterDelim(firstLine) {
        left, right := determineDelims(firstLine)
        fm, err := extractFrontMatterDelims(reader, left, right)
        if err != nil {
            return nil, err
        }
        newp.frontmatter = fm
    }

    content, err := extractContent(reader)
    if err != nil {
        return nil, err
    }

    newp.content = content

    return newp, nil
}

func chompWhitespace(r io.RuneScanner) (err error) {
    for {
        c, _, err := r.ReadRune()
        if err != nil {
            return err
        }
        if !unicode.IsSpace(c) {
            r.UnreadRune()
            return nil
        }
    }
    return
}

func peekLine(r *bufio.Reader) (line []byte, err error) {
    firstFive, err := r.Peek(5)
    if err != nil {
        return
    }
    idx := bytes.IndexByte(firstFive, '\n')
    if idx == -1 {
        return firstFive, nil
    }
    idx += 1 // include newline.
    return firstFive[:idx], nil
}

func shouldRender(lead []byte) (frontmatter bool) {
    if len(lead) <= 0 {
        return
    }

    if bytes.Equal(lead[:1], []byte(HTML_LEAD)) {
        return
    }
    return true
}

func isFrontMatterDelim(data []byte) bool {
    for _, d := range delims {
        if bytes.HasPrefix(data, d) {
            return true
        }
    }

    return false
}

func determineDelims(firstLine []byte) (left, right []byte) {
    switch len(firstLine) {
    case 4:
        if firstLine[0] == YAML_LEAD[0] {
            return []byte(YAML_DELIM_UNIX), []byte(YAML_DELIM_UNIX)
        }
        return []byte(TOML_DELIM_UNIX), []byte(TOML_DELIM_UNIX)

    case 5:
        if firstLine[0] == YAML_LEAD[0] {
            return []byte(YAML_DELIM_DOS), []byte(YAML_DELIM_DOS)
        }
        return []byte(TOML_DELIM_DOS), []byte(TOML_DELIM_DOS)
    case 3:
        fallthrough
    case 2:
        fallthrough
    case 1:
        return []byte(JAVA_LEAD), []byte("}")
    default:
        panic(fmt.Sprintf("Unable to determine delims from %q", firstLine))
    }
    return
}

func extractFrontMatterDelims(r *bufio.Reader, left, right []byte) (fm FrontMatter, err error) {
    var level int = 0
    var sameDelim = bytes.Equal(left, right)
    wr := new(bytes.Buffer)
    for {
        c, err := r.ReadByte()
        if err != nil {
            return nil, err
        }

        switch c {
        case left[0]:
            match, err := matches(r, wr, []byte{c}, left)
            if err != nil {
                return nil, err
            }
            if match {
                if sameDelim {
                    if level == 0 {
                        level = 1
                    } else {
                        level = 0
                    }
                } else {
                    level += 1
                }
            }
        case right[0]:
            match, err := matches(r, wr, []byte{c}, right)
            if err != nil {
                return nil, err
            }
            if match {
                level -= 1
            }
        default:
            if err = wr.WriteByte(c); err != nil {
                return nil, err
            }
        }

        if level == 0 && !unicode.IsSpace(rune(c)) {
            if err = chompWhitespace(r); err != nil {
                if err != io.EOF {
                    return nil, err
                }
            }
            return wr.Bytes(), nil
        }
    }
    return nil, errors.New("Could not find front matter.")
}

func matches(r *bufio.Reader, wr io.Writer, c, expected []byte) (ok bool, err error) {
    if len(expected) == 1 {
        if _, err = wr.Write(c); err != nil {
            return
        }
        return bytes.Equal(c, expected), nil
    }
    buf := make([]byte, len(expected)-1)
    if _, err = r.Read(buf); err != nil {
        return
    }

    buf = append(c, buf...)
    if _, err = wr.Write(buf); err != nil {
        return
    }

    return bytes.Equal(expected, buf), nil
}

func extractContent(r io.Reader) (content Content, err error) {
    wr := new(bytes.Buffer)
    if _, err = wr.ReadFrom(r); err != nil {
        return
    }
    return wr.Bytes(), nil
}
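Because shouldRender, isFrontMatterDelim, peekLine and the other helpers in this file are unexported, a quick experiment with them has to sit inside the parser package itself. The following sketch is illustrative only; the function name classifyLeadDemo and the sample documents are invented, and the expected results follow from peekLine's five-byte peek and the delimiter table above.

package parser

import (
    "bufio"
    "fmt"
    "strings"
)

// classifyLeadDemo is a sketch: it shows how ReadFrom decides, from the first
// line alone, whether a page should be rendered and whether it starts with a
// front matter delimiter.
func classifyLeadDemo() {
    docs := []string{
        "---\ntitle: yaml\n---\nbody",      // YAML front matter delimiter
        "+++\ntitle = \"toml\"\n+++\nbody", // TOML front matter delimiter
        "{ \"title\": \"json\" }\nbody",    // JSON-style front matter lead
        "<html><body></body></html>",       // HTML lead: shouldRender reports false
    }
    for _, doc := range docs {
        line, err := peekLine(bufio.NewReader(strings.NewReader(doc)))
        if err != nil {
            fmt.Println(err)
            continue
        }
        // Prints true/true for the first three documents and false/false for the HTML one.
        fmt.Println(shouldRender(line), isFrontMatterDelim(line))
    }
}

This matches the expectations encoded in parse_frontmatter_test.go below, where the HTML fixtures expect render == false and a nil front matter.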
parser/parse_frontmatter_test.go (new file, 292 lines)

@@ -0,0 +1,292 @@
package parser

// TODO Support Mac Encoding (\r)

import (
    "bufio"
    "bytes"
    "io"
    "os"
    "path/filepath"
    "strings"
    "testing"
)

var (
    CONTENT_EMPTY = ""
    CONTENT_NO_FRONTMATTER = "a page with no front matter"
    CONTENT_WITH_FRONTMATTER = "---\ntitle: front matter\n---\nContent with front matter"
    CONTENT_HTML_NODOCTYPE = "<html>\n\t<body>\n\t</body>\n</html>"
    CONTENT_HTML_WITHDOCTYPE = "<!doctype html><html><body></body></html>"
    CONTENT_HTML_WITH_FRONTMATTER = "---\ntitle: front matter\n---\n<!doctype><html><body></body></html>"
    CONTENT_LWS_HTML = " <html><body></body></html>"
    CONTENT_LWS_LF_HTML = "\n<html><body></body></html>"
    CONTENT_INCOMPLETE_BEG_FM_DELIM = "--\ntitle: incomplete beg fm delim\n---\nincomplete frontmatter delim"
    CONTENT_INCOMPLETE_END_FM_DELIM = "---\ntitle: incomplete end fm delim\n--\nincomplete frontmatter delim"
    CONTENT_MISSING_END_FM_DELIM = "---\ntitle: incomplete end fm delim\nincomplete frontmatter delim"
    CONTENT_FM_NO_DOC = "---\ntitle: no doc\n---"
    CONTENT_WITH_JS_FM = "{\n \"categories\": \"d\",\n \"tags\": [\n \"a\", \n \"b\", \n \"c\"\n ]\n}\nJSON Front Matter with tags and categories"
)

var lineEndings = []string{"\n", "\r\n"}
var delimiters = []string{"-", "+"}

func pageMust(p Page, err error) *page {
    if err != nil {
        panic(err)
    }
    return p.(*page)
}

func pageRecoverAndLog(t *testing.T) {
    if err := recover(); err != nil {
        t.Errorf("panic/recover: %s\n", err)
    }
}

func TestDegenerateCreatePageFrom(t *testing.T) {
    tests := []struct {
        content string
    }{
        {CONTENT_EMPTY},
        {CONTENT_MISSING_END_FM_DELIM},
        {CONTENT_INCOMPLETE_END_FM_DELIM},
        {CONTENT_FM_NO_DOC},
    }

    for _, test := range tests {
        for _, ending := range lineEndings {
            test.content = strings.Replace(test.content, "\n", ending, -1)
            _, err := ReadFrom(strings.NewReader(test.content))
            if err == nil {
                t.Errorf("Content should return an err:\n%q\n", test.content)
            }
        }
    }
}

func checkPageRender(t *testing.T, p *page, expected bool) {
    if p.render != expected {
        t.Errorf("page.render should be %t, got: %t", expected, p.render)
    }
}

func checkPageFrontMatterIsNil(t *testing.T, p *page, content string, expected bool) {
    if bool(p.frontmatter == nil) != expected {
        t.Logf("\n%q\n", content)
        t.Errorf("page.frontmatter == nil? %t, got %t", expected, p.frontmatter == nil)
    }
}

func checkPageFrontMatterContent(t *testing.T, p *page, frontMatter string) {
    if p.frontmatter == nil {
        return
    }
    if !bytes.Equal(p.frontmatter, []byte(frontMatter)) {
        t.Errorf("expected frontmatter %q, got %q", frontMatter, p.frontmatter)
    }
}

func checkPageContent(t *testing.T, p *page, expected string) {
    if !bytes.Equal(p.content, []byte(expected)) {
        t.Errorf("expected content %q, got %q", expected, p.content)
    }
}

func TestStandaloneCreatePageFrom(t *testing.T) {
    tests := []struct {
        content string
        expectedMustRender bool
        frontMatterIsNil bool
        frontMatter string
        bodycontent string
    }{
        {CONTENT_NO_FRONTMATTER, true, true, "", "a page with no front matter"},
        {CONTENT_WITH_FRONTMATTER, true, false, "---\ntitle: front matter\n---\n", "Content with front matter"},
        {CONTENT_HTML_NODOCTYPE, false, true, "", "<html>\n\t<body>\n\t</body>\n</html>"},
        {CONTENT_HTML_WITHDOCTYPE, false, true, "", "<!doctype html><html><body></body></html>"},
        {CONTENT_HTML_WITH_FRONTMATTER, true, false, "---\ntitle: front matter\n---\n", "<!doctype><html><body></body></html>"},
        {CONTENT_LWS_HTML, false, true, "", "<html><body></body></html>"},
        {CONTENT_LWS_LF_HTML, false, true, "", "<html><body></body></html>"},
        {CONTENT_WITH_JS_FM, true, false, "{\n \"categories\": \"d\",\n \"tags\": [\n \"a\", \n \"b\", \n \"c\"\n ]\n}", "JSON Front Matter with tags and categories"},
    }

    for _, test := range tests {
        for _, ending := range lineEndings {
            test.content = strings.Replace(test.content, "\n", ending, -1)
            test.frontMatter = strings.Replace(test.frontMatter, "\n", ending, -1)
            test.bodycontent = strings.Replace(test.bodycontent, "\n", ending, -1)

            p := pageMust(ReadFrom(strings.NewReader(test.content)))

            checkPageRender(t, p, test.expectedMustRender)
            checkPageFrontMatterIsNil(t, p, test.content, test.frontMatterIsNil)
            checkPageFrontMatterContent(t, p, test.frontMatter)
            checkPageContent(t, p, test.bodycontent)
        }
    }
}

func BenchmarkLongFormRender(b *testing.B) {

    tests := []struct {
        filename string
        buf []byte
    }{
        {filename: "long_text_test.md"},
    }
    for i, test := range tests {
        path := filepath.FromSlash(test.filename)
        f, err := os.Open(path)
        if err != nil {
            b.Fatalf("Unable to open %s: %s", path, err)
        }
        defer f.Close()
        membuf := new(bytes.Buffer)
        if _, err := io.Copy(membuf, f); err != nil {
            b.Fatalf("Unable to read %s: %s", path, err)
        }
        tests[i].buf = membuf.Bytes()
    }

    b.ResetTimer()

    for i := 0; i <= b.N; i++ {
        for _, test := range tests {
            ReadFrom(bytes.NewReader(test.buf))
        }
    }
}

func TestPageShouldRender(t *testing.T) {
    tests := []struct {
        content []byte
        expected bool
    }{
        {[]byte{}, false},
        {[]byte{'<'}, false},
        {[]byte{'-'}, true},
        {[]byte("--"), true},
        {[]byte("---"), true},
        {[]byte("---\n"), true},
        {[]byte{'a'}, true},
    }

    for _, test := range tests {
        for _, ending := range lineEndings {
            test.content = bytes.Replace(test.content, []byte("\n"), []byte(ending), -1)
            if render := shouldRender(test.content); render != test.expected {

                t.Errorf("Expected %s to shouldRender = %t, got: %t", test.content, test.expected, render)
            }
        }
    }
}

func TestPageHasFrontMatter(t *testing.T) {
    tests := []struct {
        content []byte
        expected bool
    }{
        {[]byte{'-'}, false},
        {[]byte("--"), false},
        {[]byte("---"), false},
        {[]byte("---\n"), true},
        {[]byte("---\n"), true},
        {[]byte{'a'}, false},
        {[]byte{'{'}, true},
        {[]byte("{\n "), true},
        {[]byte{'}'}, false},
    }
    for _, test := range tests {
        for _, ending := range lineEndings {
            test.content = bytes.Replace(test.content, []byte("\n"), []byte(ending), -1)
            if isFrontMatterDelim := isFrontMatterDelim(test.content); isFrontMatterDelim != test.expected {
                t.Errorf("Expected %q isFrontMatterDelim = %t, got: %t", test.content, test.expected, isFrontMatterDelim)
            }
        }
    }
}

func TestExtractFrontMatter(t *testing.T) {

    tests := []struct {
        frontmatter string
        extracted []byte
        errIsNil bool
    }{
        {"", nil, false},
        {"-", nil, false},
        {"---\n", nil, false},
        {"---\nfoobar", nil, false},
        {"---\nfoobar\nbarfoo\nfizbaz\n", nil, false},
        {"---\nblar\n-\n", nil, false},
        {"---\nralb\n---\n", []byte("---\nralb\n---\n"), true},
        {"---\nminc\n---\ncontent", []byte("---\nminc\n---\n"), true},
        {"---\ncnim\n---\ncontent\n", []byte("---\ncnim\n---\n"), true},
    }

    for _, test := range tests {
        for _, ending := range lineEndings {
            test.frontmatter = strings.Replace(test.frontmatter, "\n", ending, -1)
            test.extracted = bytes.Replace(test.extracted, []byte("\n"), []byte(ending), -1)
            for _, delim := range delimiters {
                test.frontmatter = strings.Replace(test.frontmatter, "-", delim, -1)
                test.extracted = bytes.Replace(test.extracted, []byte("-"), []byte(delim), -1)
                line, err := peekLine(bufio.NewReader(strings.NewReader(test.frontmatter)))
                if err != nil {
                    continue
                }
                l, r := determineDelims(line)
                fm, err := extractFrontMatterDelims(bufio.NewReader(strings.NewReader(test.frontmatter)), l, r)
                if (err == nil) != test.errIsNil {
                    t.Logf("\n%q\n", string(test.frontmatter))
                    t.Errorf("Expected err == nil => %t, got: %t. err: %s", test.errIsNil, err == nil, err)
                    continue
                }
                if !bytes.Equal(fm, test.extracted) {
                    t.Logf("\n%q\n", string(test.frontmatter))
                    t.Errorf("Expected front matter %q. got %q", string(test.extracted), fm)
                }
            }
        }
    }
}

func TestExtractFrontMatterDelim(t *testing.T) {
    var (
        noErrExpected = true
        errExpected = false
    )
    tests := []struct {
        frontmatter string
        extracted string
        errIsNil bool
    }{
        {"", "", errExpected},
        {"{", "", errExpected},
        {"{}", "{}", noErrExpected},
        {" {}", " {}", noErrExpected},
        {"{} ", "{}", noErrExpected},
        {"{ } ", "{ }", noErrExpected},
        {"{ { }", "", errExpected},
        {"{ { } }", "{ { } }", noErrExpected},
        {"{ { } { } }", "{ { } { } }", noErrExpected},
        {"{\n{\n}\n}\n", "{\n{\n}\n}", noErrExpected},
        {"{\n \"categories\": \"d\",\n \"tags\": [\n \"a\", \n \"b\", \n \"c\"\n ]\n}\nJSON Front Matter with tags and categories", "{\n \"categories\": \"d\",\n \"tags\": [\n \"a\", \n \"b\", \n \"c\"\n ]\n}", noErrExpected},
    }

    for _, test := range tests {
        fm, err := extractFrontMatterDelims(bufio.NewReader(strings.NewReader(test.frontmatter)), []byte("{"), []byte("}"))
        if (err == nil) != test.errIsNil {
            t.Logf("\n%q\n", string(test.frontmatter))
            t.Errorf("Expected err == nil => %t, got: %t. err: %s", test.errIsNil, err == nil, err)
            continue
        }
        if !bytes.Equal(fm, []byte(test.extracted)) {
            t.Logf("\n%q\n", string(test.frontmatter))
            t.Errorf("Expected front matter %q. got %q", string(test.extracted), fm)
        }
    }
}