Do a thing
parent 0a0b3745d0
commit cb7a238c80
1 changed file with 74 additions and 4 deletions
@@ -2,12 +2,19 @@ package parser
 
 import (
 	"errors"
+	"fmt"
 	"io/ioutil"
 	"strings"
 )
 
 // THE HYLIA PARSER
 
-func ParseFile(filename string) ([]string, error) {
+type Element struct {
+	Name     string
+	Content  string
+	FilePath string
+}
+
+func ParseFile(filename string) ([]Element, error) {
 	// Hey, does the file we're trying to parse actually exist?
 	data, err := ioutil.ReadFile(filename)
@@ -21,10 +28,73 @@ func ParseFile(filename string) ([]string, error) {
 		return nil, errors.New("The structure of this file is invalid. Are you sure this is a Hylia (.hy) file?")
 	}
 
-	// Extract the custom elements, which are wrapped in <element> tags
-	elements := []string{}
-	// ... TODO
+	// Extract the actual Hylia content
+	start := strings.Index(content, "<hylia>")
+	end := strings.Index(content, "</hylia>")
+	if start == -1 || end == -1 {
+		return nil, errors.New("Missing <hylia> tags. Are you sure this is a Hylia (.hy) file?")
+	}
+
+	content = content[start+len("<hylia>") : end]
+
+	// Extract the custom elements (wrapped in <element> tags) and handle imports
+	elements := []Element{}
+	lines := strings.Split(content, "\n")
+	for _, line := range lines {
+		line = strings.TrimSpace(line)
+		if strings.HasPrefix(line, "<element") {
+			// Extract the element name
+			name := extractAttributeValue(line, "name")
+			if name == "" {
+				return nil, errors.New("<element> tag is missing a name. ('name' attribute)")
+			}
+			elementContent, err := extractElementContent(line, content)
+			if err != nil {
+				return nil, err
+			}
+			elements = append(elements, Element{Name: name, Content: elementContent, FilePath: filename})
+		} else if strings.HasPrefix(line, "<import") {
+			filePath := extractAttributeValue(line, "file")
+			if filePath == "" {
+				return nil, errors.New("<import> tag is missing a file ('file' attribute)")
+			}
+			// Recursively parse the imported file
+			importedElements, err := ParseFile(filePath)
+			if err != nil {
+				return nil, fmt.Errorf("ERROR: Could not import file %s: %w", filePath, err)
+			}
+			elements = append(elements, importedElements...)
+		}
+	}
 
 	return elements, nil
 
+}
+
+// Extracts an attribute
+func extractAttributeValue(tag, attribute string) string {
+	prefix := fmt.Sprintf(`%s="`, attribute)
+	start := strings.Index(tag, prefix)
+	if start == -1 {
+		return ""
+	}
+	start += len(prefix)
+	end := strings.Index(tag[start:], `"`)
+	if end == -1 {
+		return ""
+	}
+	return tag[start : start+end]
+}
+
+// Extracts the content of an <element>
+func extractElementContent(tag, content string) (string, error) {
+	start := strings.Index(content, tag)
+	if start == -1 {
+		return "", errors.New("element tag not found in content")
+	}
+	end := strings.Index(content[start:], "</element>")
+	if end == -1 {
+		return "", errors.New("missing end tag of element")
+	}
+	return content[start+len(tag) : start+end], nil
+}
 }
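
For orientation, here is a minimal usage sketch of the new API. It is not part of the commit: the import path (example.com/hylia/parser) and the components.hy file shown in the comment are placeholder assumptions for illustration; only ParseFile and Element come from the diff above.

// A hypothetical caller. The import path and the components.hy file below
// are assumptions for illustration; only ParseFile and Element come from
// the commit above.
package main

import (
	"fmt"
	"log"

	"example.com/hylia/parser" // placeholder import path
)

// components.hy (assumed to exist in the working directory):
//
//   <hylia>
//   <element name="greeting">
//   <p>Hello from Hylia!</p>
//   </element>
//   </hylia>

func main() {
	elements, err := parser.ParseFile("components.hy")
	if err != nil {
		log.Fatal(err)
	}
	for _, el := range elements {
		// Each Element carries its name, raw content, and source file.
		fmt.Printf("%s (from %s):%s\n", el.Name, el.FilePath, el.Content)
	}
}

Note that in such a file the <element> open tag has to sit on its own line, since ParseFile matches tags line by line after TrimSpace and slices the content up to the closing </element>.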