Commit 0e67fa18 authored by Nick Thomas's avatar Nick Thomas

Merge branch 'id-lsif-processing' into 'master'

Process LSIF document before sending it to GitLab

See merge request gitlab-org/gitlab-workhorse!492
parents f3677174 d2ad5242
---
title: Process LSIF document before sending it to GitLab
merge_request: 492
author:
type: added
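For context: the change adds an LSIF transformer under internal/lsif_transformer/parser and calls it from the upload rewriter (see handleLsifUpload further below). The following is a minimal standalone sketch of how the new parser API fits together; the transformLsif helper, main function, and file names are illustrative assumptions, not part of the merge request.

package main

import (
	"io"
	"os"

	"gitlab.com/gitlab-org/gitlab-workhorse/internal/lsif_transformer/parser"
)

// transformLsif reads a raw LSIF zip and writes the pre-processed zip
// (one JSON file per document under "lsif/") that Workhorse sends to GitLab.
func transformLsif(src io.Reader, tempDir string, dst io.Writer) error {
	p, err := parser.NewParser(src, tempDir) // parses the LSIF dump line by line
	if err != nil {
		return err
	}
	defer p.Close()

	z, err := p.ZipReader() // serialized ranges, hovers and definitions per document
	if err != nil {
		return err
	}

	_, err = io.Copy(dst, z)
	return err
}

func main() {
	in, err := os.Open("dump.lsif.zip") // illustrative input path
	if err != nil {
		panic(err)
	}
	defer in.Close()

	out, err := os.Create("processed.lsif.zip") // illustrative output path
	if err != nil {
		panic(err)
	}
	defer out.Close()

	if err := transformLsif(in, "", out); err != nil {
		panic(err)
	}
}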
@@ -5,6 +5,7 @@ go 1.12
require (
github.com/BurntSushi/toml v0.3.1
github.com/FZambia/sentinel v1.0.0
github.com/alecthomas/chroma v0.7.3
github.com/dgrijalva/jwt-go v3.2.0+incompatible
github.com/getsentry/raven-go v0.1.2
github.com/golang/gddo v0.0.0-20190419222130-af0f2af80721
@@ -19,13 +20,13 @@ require (
github.com/rafaeljusto/redigomock v0.0.0-20190202135759-257e089e14a1
github.com/sebest/xff v0.0.0-20160910043805-6c115e0ffa35
github.com/sirupsen/logrus v1.3.0
github.com/stretchr/testify v1.4.0
github.com/stretchr/testify v1.5.1
gitlab.com/gitlab-org/gitaly v1.74.0
gitlab.com/gitlab-org/labkit v0.0.0-20200327153541-fac94cb428e6
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f
golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5 // indirect
golang.org/x/tools v0.0.0-20200117161641-43d50277825c
google.golang.org/grpc v1.24.0
gopkg.in/yaml.v2 v2.2.8 // indirect
honnef.co/go/tools v0.0.1-2019.2.3
)
@@ -131,6 +131,8 @@ type Response struct {
Repository gitalypb.Repository
// For git-http, does the requestor have the right to view all refs?
ShowAllRefs bool
// ProcessLsif indicates whether the uploaded artifact should be processed as an LSIF document for code intelligence
ProcessLsif bool
}
// singleJoiningSlash is taken from reverseproxy.go:NewSingleHostReverseProxy
......
@@ -118,11 +118,15 @@ type testServer struct {
cleanup func()
}
func setupWithTmpPath(t *testing.T, filename string, bodyProcessor func(w http.ResponseWriter, r *http.Request)) *testServer {
func setupWithTmpPath(t *testing.T, filename string, authResponse *api.Response, bodyProcessor func(w http.ResponseWriter, r *http.Request)) *testServer {
tempPath, err := ioutil.TempDir("", "uploads")
require.NoError(t, err)
ts := testArtifactsUploadServer(t, api.Response{TempPath: tempPath}, bodyProcessor)
if authResponse == nil {
authResponse = &api.Response{TempPath: tempPath}
}
ts := testArtifactsUploadServer(t, *authResponse, bodyProcessor)
var buffer bytes.Buffer
writer := multipart.NewWriter(&buffer)
@@ -155,7 +159,7 @@ func testUploadArtifacts(t *testing.T, contentType, url string, body io.Reader)
}
func TestUploadHandlerAddingMetadata(t *testing.T) {
s := setupWithTmpPath(t, "file",
s := setupWithTmpPath(t, "file", nil,
func(w http.ResponseWriter, r *http.Request) {
token, err := jwt.ParseWithClaims(r.Header.Get(upload.RewrittenFieldsHeader), &upload.MultipartClaims{}, testhelper.ParseJWT)
require.NoError(t, err)
@@ -185,7 +189,7 @@ func TestUploadHandlerAddingMetadata(t *testing.T) {
}
func TestUploadHandlerForUnsupportedArchive(t *testing.T) {
s := setupWithTmpPath(t, "file", nil)
s := setupWithTmpPath(t, "file", nil, nil)
defer s.cleanup()
require.NoError(t, s.writer.Close())
@@ -195,7 +199,7 @@ func TestUploadHandlerForUnsupportedArchive(t *testing.T) {
}
func TestUploadHandlerForMultipleFiles(t *testing.T) {
s := setupWithTmpPath(t, "file", nil)
s := setupWithTmpPath(t, "file", nil, nil)
defer s.cleanup()
file, err := s.writer.CreateFormFile("file", "my.file")
@@ -208,8 +212,47 @@ func TestUploadHandlerForMultipleFiles(t *testing.T) {
}
func TestUploadFormProcessing(t *testing.T) {
s := setupWithTmpPath(t, "metadata", nil)
s := setupWithTmpPath(t, "metadata", nil, nil)
defer s.cleanup()
require.NoError(t, s.writer.Close())
response := testUploadArtifacts(t, s.writer.FormDataContentType(), s.url, s.buffer)
testhelper.AssertResponseCode(t, response, http.StatusInternalServerError)
}
func TestLsifFileProcessing(t *testing.T) {
tempPath, err := ioutil.TempDir("", "uploads")
require.NoError(t, err)
s := setupWithTmpPath(t, "file", &api.Response{TempPath: tempPath, ProcessLsif: true}, nil)
defer s.cleanup()
file, err := os.Open("../../testdata/lsif/valid.lsif.zip")
require.NoError(t, err)
_, err = io.Copy(s.fileWriter, file)
require.NoError(t, err)
require.NoError(t, file.Close())
require.NoError(t, s.writer.Close())
response := testUploadArtifacts(t, s.writer.FormDataContentType(), s.url, s.buffer)
testhelper.AssertResponseCode(t, response, http.StatusOK)
testhelper.AssertResponseHeader(t, response, MetadataHeaderKey, MetadataHeaderPresent)
}
func TestInvalidLsifFileProcessing(t *testing.T) {
tempPath, err := ioutil.TempDir("", "uploads")
require.NoError(t, err)
s := setupWithTmpPath(t, "file", &api.Response{TempPath: tempPath, ProcessLsif: true}, nil)
defer s.cleanup()
file, err := os.Open("../../testdata/lsif/invalid.lsif.zip")
require.NoError(t, err)
_, err = io.Copy(s.fileWriter, file)
require.NoError(t, err)
require.NoError(t, file.Close())
require.NoError(t, s.writer.Close())
response := testUploadArtifacts(t, s.writer.FormDataContentType(), s.url, s.buffer)
......
package parser
import (
"bytes"
"encoding/json"
"html/template"
"io"
"strings"
"github.com/alecthomas/chroma"
"github.com/alecthomas/chroma/lexers"
)
var (
languageTemplate = template.Must(template.New("lang").Parse(`<span class="line" lang="{{.}}">`))
valueTemplate = template.Must(template.New("value").Parse(`<span class="{{.Class}}">{{.Value}}</span>`))
)
type CodeHover struct {
Value string `json:"value"`
Language string `json:"language,omitempty"`
}
func NewCodeHover(content json.RawMessage) (*CodeHover, error) {
// The hover content is either an object, e.g. { "value": "func main()", "language": "go" },
// or a plain documentation string.
// We first try to unmarshal the content as a string; if that fails, we unmarshal it as an
// object and highlight its value.
var codeHover CodeHover
if err := json.Unmarshal(content, &codeHover.Value); err != nil {
if err := json.Unmarshal(content, &codeHover); err != nil {
return nil, err
}
codeHover.Highlight()
}
return &codeHover, nil
}
func (c *CodeHover) Highlight() {
var b bytes.Buffer
for i, line := range c.codeLines() {
if i > 0 {
if _, err := io.WriteString(&b, "\n"); err != nil {
return
}
}
if err := languageTemplate.Execute(&b, c.Language); err != nil {
return
}
for _, token := range line {
if err := writeTokenValue(&b, token); err != nil {
return
}
}
if _, err := io.WriteString(&b, "</span>"); err != nil {
return
}
}
c.Value = b.String()
}
func writeTokenValue(w io.Writer, token chroma.Token) error {
if strings.HasPrefix(token.Type.String(), "Keyword") || token.Type == chroma.String || token.Type == chroma.Comment {
data := struct {
Class string
Value string
}{
Class: chroma.StandardTypes[token.Type],
Value: replaceNewLines(token.Value),
}
return valueTemplate.Execute(w, data)
}
_, err := io.WriteString(w, template.HTMLEscapeString(replaceNewLines(token.Value)))
return err
}
func replaceNewLines(value string) string {
return strings.ReplaceAll(value, "\n", "")
}
func (c *CodeHover) codeLines() [][]chroma.Token {
lexer := lexers.Get(c.Language)
if lexer == nil {
return [][]chroma.Token{}
}
iterator, err := lexer.Tokenise(nil, c.Value)
if err != nil {
return [][]chroma.Token{}
}
return chroma.SplitTokensIntoLines(iterator.Tokens())
}
package parser
import (
"encoding/json"
"fmt"
"testing"
"github.com/stretchr/testify/require"
)
func TestHighlight(t *testing.T) {
tests := []struct {
name string
language string
value string
want string
}{
{
name: "go function definition",
language: "go",
value: "func main()",
want: "<span class=\"line\" lang=\"go\"><span class=\"kd\">func</span> main()</span>",
},
{
name: "go struct definition",
language: "go",
value: "type Command struct",
want: "<span class=\"line\" lang=\"go\"><span class=\"kd\">type</span> Command <span class=\"kd\">struct</span></span>",
},
{
name: "go struct multiline definition",
language: "go",
value: `struct {\nConfig *Config\nReadWriter *ReadWriter\nEOFSent bool\n}`,
want: "<span class=\"line\" lang=\"go\"><span class=\"kd\">struct</span> {</span>\n<span class=\"line\" lang=\"go\">Config *Config</span>\n<span class=\"line\" lang=\"go\">ReadWriter *ReadWriter</span>\n<span class=\"line\" lang=\"go\">EOFSent <span class=\"kt\">bool</span></span>\n<span class=\"line\" lang=\"go\">}</span>",
},
{
name: "ruby method definition",
language: "ruby",
value: "def read(line)",
want: "<span class=\"line\" lang=\"ruby\"><span class=\"k\">def</span> read(line)</span>",
},
{
name: "amp symbol is escaped",
language: "ruby",
value: `def &(line)\nend`,
want: "<span class=\"line\" lang=\"ruby\"><span class=\"k\">def</span> &amp;(line)</span>\n<span class=\"line\" lang=\"ruby\"><span class=\"k\">end</span></span>",
},
{
name: "less symbol is escaped",
language: "ruby",
value: "def <(line)",
want: "<span class=\"line\" lang=\"ruby\"><span class=\"k\">def</span> &lt;(line)</span>",
},
{
name: "more symbol is escaped",
language: "ruby",
value: `def >(line)\nend`,
want: "<span class=\"line\" lang=\"ruby\"><span class=\"k\">def</span> &gt;(line)</span>\n<span class=\"line\" lang=\"ruby\"><span class=\"k\">end</span></span>",
},
{
name: "unknown/malicious language is passed",
language: "<lang> alert(1); </lang>",
value: `def a;\nend`,
want: "",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
raw := []byte(fmt.Sprintf(`{"language":"%s","value":"%s"}`, tt.language, tt.value))
c, err := NewCodeHover(json.RawMessage(raw))
require.NoError(t, err)
require.Equal(t, tt.want, c.Value)
})
}
}
func TestMarkdown(t *testing.T) {
value := `"This method reverses a string \n\n"`
c, err := NewCodeHover(json.RawMessage(value))
require.NoError(t, err)
require.Equal(t, "This method reverses a string \n\n", c.Value)
}
package parser
import (
"archive/zip"
"encoding/json"
"strings"
)
type Line struct {
Type string `json:"label"`
}
type Docs struct {
Root string
Entries map[string]string
DocRanges map[string][]string
Ranges *Ranges
}
type Document struct {
Id string `json:"id"`
Uri string `json:"uri"`
}
type DocumentRange struct {
OutV string `json:"outV"`
RangeIds []string `json:"inVs"`
}
type Metadata struct {
Root string `json:"projectRoot"`
}
func NewDocs(tempDir string) (*Docs, error) {
ranges, err := NewRanges(tempDir)
if err != nil {
return nil, err
}
return &Docs{
Root: "file:///",
Entries: make(map[string]string),
DocRanges: make(map[string][]string),
Ranges: ranges,
}, nil
}
func (d *Docs) Read(line []byte) error {
l := Line{}
if err := json.Unmarshal(line, &l); err != nil {
return err
}
switch l.Type {
case "metaData":
if err := d.addMetadata(line); err != nil {
return err
}
case "document":
if err := d.addDocument(line); err != nil {
return err
}
case "contains":
if err := d.addDocRanges(line); err != nil {
return err
}
default:
return d.Ranges.Read(l.Type, line)
}
return nil
}
func (d *Docs) Close() error {
return d.Ranges.Close()
}
func (d *Docs) SerializeEntries(w *zip.Writer) error {
for id, path := range d.Entries {
filePath := Lsif + "/" + path + ".json"
f, err := w.Create(filePath)
if err != nil {
return err
}
if err := d.Ranges.Serialize(f, d.DocRanges[id], d.Entries); err != nil {
return err
}
}
return nil
}
func (d *Docs) addMetadata(line []byte) error {
var metadata Metadata
if err := json.Unmarshal(line, &metadata); err != nil {
return err
}
d.Root = strings.TrimSpace(metadata.Root) + "/"
return nil
}
func (d *Docs) addDocument(line []byte) error {
var doc Document
if err := json.Unmarshal(line, &doc); err != nil {
return err
}
d.Entries[doc.Id] = strings.TrimPrefix(doc.Uri, d.Root)
return nil
}
func (d *Docs) addDocRanges(line []byte) error {
var docRange DocumentRange
if err := json.Unmarshal(line, &docRange); err != nil {
return err
}
d.DocRanges[docRange.OutV] = docRange.RangeIds
return nil
}
package parser
import (
"fmt"
"testing"
"github.com/stretchr/testify/require"
)
func createLine(id, label, uri string) []byte {
return []byte(fmt.Sprintf(`{"id":"%s","label":"%s","uri":"%s"}`, id, label, uri))
}
func TestRead(t *testing.T) {
d, err := NewDocs("")
require.NoError(t, err)
defer d.Close()
metadataLine := []byte(`{"id":"1","label":"metaData","projectRoot":"file:///Users/nested"}`)
require.NoError(t, d.Read(metadataLine))
require.NoError(t, d.Read(createLine("2", "document", "file:///Users/nested/file.rb")))
require.NoError(t, d.Read(createLine("3", "document", "file:///Users/nested/folder/file.rb")))
require.NoError(t, d.Read(createLine("4", "document", "file:///Users/wrong/file.rb")))
require.Equal(t, d.Entries["2"], "file.rb")
require.Equal(t, d.Entries["3"], "folder/file.rb")
require.Equal(t, d.Entries["4"], "file:///Users/wrong/file.rb")
}
func TestReadContainsLine(t *testing.T) {
d, err := NewDocs("")
require.NoError(t, err)
defer d.Close()
line := []byte(`{"id":"5","label":"contains","outV":"1", "inVs": ["2", "3"]}`)
require.NoError(t, d.Read(line))
require.Equal(t, []string{"2", "3"}, d.DocRanges["1"])
}
package parser
import (
"encoding/json"
"io/ioutil"
"os"
)
type Offset struct {
At int
Len int
}
type Hovers struct {
Offsets map[string]*Offset
File *os.File
CurrentOffset int
}
type RawResult struct {
Contents []json.RawMessage `json:"contents"`
}
type RawData struct {
Id string `json:"id"`
Result RawResult `json:"result"`
}
type HoverRef struct {
ResultSetId string `json:"outV"`
HoverId string `json:"inV"`
}
type ResultSetRef struct {
ResultSetId string `json:"outV"`
RefId string `json:"inV"`
}
func NewHovers(tempDir string) (*Hovers, error) {
file, err := ioutil.TempFile(tempDir, "hovers")
if err != nil {
return nil, err
}
return &Hovers{
Offsets: make(map[string]*Offset),
File: file,
CurrentOffset: 0,
}, nil
}
func (h *Hovers) Read(label string, line []byte) error {
switch label {
case "hoverResult":
if err := h.addData(line); err != nil {
return err
}
case "textDocument/hover":
if err := h.addHoverRef(line); err != nil {
return err
}
case "textDocument/references":
if err := h.addResultSetRef(line); err != nil {
return err
}
}
return nil
}
func (h *Hovers) For(refId string) json.RawMessage {
offset, ok := h.Offsets[refId]
if !ok || offset == nil {
return nil
}
hover := make([]byte, offset.Len)
_, err := h.File.ReadAt(hover, int64(offset.At))
if err != nil {
return nil
}
return json.RawMessage(hover)
}
func (h *Hovers) Close() error {
if err := h.File.Close(); err != nil {
return err
}
return os.Remove(h.File.Name())
}
func (h *Hovers) addData(line []byte) error {
var rawData RawData
if err := json.Unmarshal(line, &rawData); err != nil {
return err
}
codeHovers := []*CodeHover{}
for _, rawContent := range rawData.Result.Contents {
codeHover, err := NewCodeHover(rawContent)
if err != nil {
return err
}
codeHovers = append(codeHovers, codeHover)
}
codeHoversData, err := json.Marshal(codeHovers)
if err != nil {
return err
}
n, err := h.File.Write(codeHoversData)
if err != nil {
return err
}
h.Offsets[rawData.Id] = &Offset{At: h.CurrentOffset, Len: n}
h.CurrentOffset += n
return nil
}
func (h *Hovers) addHoverRef(line []byte) error {
var hoverRef HoverRef
if err := json.Unmarshal(line, &hoverRef); err != nil {
return err
}
h.Offsets[hoverRef.ResultSetId] = h.Offsets[hoverRef.HoverId]
return nil
}
func (h *Hovers) addResultSetRef(line []byte) error {
var ref ResultSetRef
if err := json.Unmarshal(line, &ref); err != nil {
return err
}
offset, ok := h.Offsets[ref.ResultSetId]
if !ok {
return nil
}
h.Offsets[ref.RefId] = offset
delete(h.Offsets, ref.ResultSetId)
return nil
}
package parser
import (
"testing"
"github.com/stretchr/testify/require"
)
func TestHoversRead(t *testing.T) {
h := setupHovers(t)
require.Equal(t, `[{"value":"hello"}]`, string(h.For("1")))
require.NoError(t, h.Close())
}
func setupHovers(t *testing.T) *Hovers {
h, err := NewHovers("")
require.NoError(t, err)
require.NoError(t, h.Read("hoverResult", []byte(`{"id":"2","label":"hoverResult","result":{"contents": ["hello"]}}`)))
require.NoError(t, h.Read("textDocument/hover", []byte(`{"id":"4","label":"textDocument/hover","outV":"3","inV":"2"}`)))
require.NoError(t, h.Read("textDocument/references", []byte(`{"id":"3","label":"textDocument/references","outV":"3","inV":"1"}`)))
return h
}
package parser
import (
"archive/zip"
"bufio"
"bytes"
"errors"
"io"
"io/ioutil"
"os"
)
var (
Lsif = "lsif"
)
type Parser struct {
Docs *Docs
}
func NewParser(r io.Reader, tempDir string) (*Parser, error) {
docs, err := NewDocs(tempDir)
if err != nil {
return nil, err
}
zr, err := openZipReader(r, tempDir)
if err != nil {
return nil, err
}
reader := bufio.NewReader(zr)
for {
line, err := reader.ReadBytes('\n')
if err != nil {
break
}
if err := docs.Read(line); err != nil {
return nil, err
}
}
return &Parser{Docs: docs}, nil
}
func (p *Parser) ZipReader() (io.Reader, error) {
buf := new(bytes.Buffer)
w := zip.NewWriter(buf)
if err := p.Docs.SerializeEntries(w); err != nil {
return nil, err
}
if err := w.Close(); err != nil {
return nil, err
}
return buf, nil
}
func (p *Parser) Close() error {
return p.Docs.Close()
}
func openZipReader(reader io.Reader, tempDir string) (io.Reader, error) {
tempFile, err := ioutil.TempFile(tempDir, Lsif)
if err != nil {
return nil, err
}
defer os.Remove(tempFile.Name())
if _, err := io.Copy(tempFile, reader); err != nil {
return nil, err
}
zr, err := zip.OpenReader(tempFile.Name())
if err != nil {
return nil, err
}
if len(zr.File) == 0 {
return nil, errors.New("empty zip archive")
}
return zr.File[0].Open()
}
package parser
import (
"archive/zip"
"bytes"
"encoding/json"
"io"
"io/ioutil"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
)
func TestGenerate(t *testing.T) {
filePath := "testdata/dump.lsif.zip"
tmpDir := filePath + ".tmp"
defer os.RemoveAll(tmpDir)
createFiles(t, filePath, tmpDir)
verifyCorrectnessOf(t, tmpDir, "lsif/main.go.json")
verifyCorrectnessOf(t, tmpDir, "lsif/morestrings/reverse.go.json")
}
func verifyCorrectnessOf(t *testing.T, tmpDir, fileName string) {
file, err := ioutil.ReadFile(filepath.Join(tmpDir, fileName))
require.NoError(t, err)
var buf bytes.Buffer
require.NoError(t, json.Indent(&buf, file, "", " "))
expected, err := ioutil.ReadFile(filepath.Join("testdata/expected/", fileName))
require.NoError(t, err)
require.Equal(t, string(expected), buf.String())
}
func createFiles(t *testing.T, filePath, tmpDir string) {
file, err := os.Open(filePath)
require.NoError(t, err)
p, err := NewParser(file, "")
require.NoError(t, err)
r, err := p.ZipReader()
require.NoError(t, err)
require.NoError(t, p.Close())
zipFileName := tmpDir + ".zip"
w, err := os.Create(zipFileName)
require.NoError(t, err)
defer os.RemoveAll(zipFileName)
_, err = io.Copy(w, r)
require.NoError(t, err)
extractZipFiles(t, tmpDir, zipFileName)
}
func extractZipFiles(t *testing.T, tmpDir, zipFileName string) {
zipReader, err := zip.OpenReader(zipFileName)
require.NoError(t, err)
for _, file := range zipReader.Reader.File {
zippedFile, err := file.Open()
require.NoError(t, err)
defer zippedFile.Close()
fileDir, fileName := filepath.Split(file.Name)
require.NoError(t, os.MkdirAll(filepath.Join(tmpDir, fileDir), os.ModePerm))
outputFile, err := os.Create(filepath.Join(tmpDir, fileDir, fileName))
require.NoError(t, err)
defer outputFile.Close()
_, err = io.Copy(outputFile, zippedFile)
require.NoError(t, err)
}
}
package parser
import (
"encoding/json"
"io"
"strconv"
)
const Definitions = "definitions"
const References = "references"
type Ranges struct {
Entries map[string]*Range
DefRefs map[string]*DefRef
Hovers *Hovers
}
type RawRange struct {
Id string `json:"id"`
Data Range `json:"start"`
}
type Range struct {
Line int `json:"line"`
Character int `json:"character"`
RefId string
}
type RawDefRef struct {
Property string `json:"property"`
RefId string `json:"outV"`
RangeIds []string `json:"inVs"`
DocId string `json:"document"`
}
type DefRef struct {
Line string
DocId string
}
type SerializedRange struct {
StartLine int `json:"start_line"`
StartChar int `json:"start_char"`
DefinitionPath string `json:"definition_path,omitempty"`
Hover json.RawMessage `json:"hover"`
}
func NewRanges(tempDir string) (*Ranges, error) {
hovers, err := NewHovers(tempDir)
if err != nil {
return nil, err
}
return &Ranges{
Entries: make(map[string]*Range),
DefRefs: make(map[string]*DefRef),
Hovers: hovers,
}, nil
}
func (r *Ranges) Read(label string, line []byte) error {
switch label {
case "range":
if err := r.addRange(line); err != nil {
return err
}
case "item":
if err := r.addItem(line); err != nil {
return err
}
default:
return r.Hovers.Read(label, line)
}
return nil
}
func (r *Ranges) Serialize(f io.Writer, rangeIds []string, docs map[string]string) error {
encoder := json.NewEncoder(f)
n := len(rangeIds)
if _, err := f.Write([]byte("[")); err != nil {
return err
}
for i, rangeId := range rangeIds {
entry := r.Entries[rangeId]
serializedRange := SerializedRange{
StartLine: entry.Line,
StartChar: entry.Character,
DefinitionPath: r.definitionPathFor(docs, entry.RefId),
Hover: r.Hovers.For(entry.RefId),
}
if err := encoder.Encode(serializedRange); err != nil {
return err
}
if i+1 < n {
if _, err := f.Write([]byte(",")); err != nil {
return err
}
}
}
if _, err := f.Write([]byte("]")); err != nil {
return err
}
return nil
}
func (r *Ranges) Close() error {
return r.Hovers.Close()
}
func (r *Ranges) definitionPathFor(docs map[string]string, refId string) string {
defRef, ok := r.DefRefs[refId]
if !ok {
return ""
}
defPath := docs[defRef.DocId] + "#L" + defRef.Line
return defPath
}
func (r *Ranges) addRange(line []byte) error {
var rg RawRange
if err := json.Unmarshal(line, &rg); err != nil {
return err
}
r.Entries[rg.Id] = &rg.Data
return nil
}
func (r *Ranges) addItem(line []byte) error {
var defRef RawDefRef
if err := json.Unmarshal(line, &defRef); err != nil {
return err
}
if defRef.Property != Definitions && defRef.Property != References {
return nil
}
for _, rangeId := range defRef.RangeIds {
if entry, ok := r.Entries[rangeId]; ok {
entry.RefId = defRef.RefId
}
}
if defRef.Property != Definitions {
return nil
}
defRange := r.Entries[defRef.RangeIds[0]]
r.DefRefs[defRef.RefId] = &DefRef{
Line: strconv.Itoa(defRange.Line + 1),
DocId: defRef.DocId,
}
return nil
}
package parser
import (
"bytes"
"testing"
"github.com/stretchr/testify/require"
)
func TestRangesRead(t *testing.T) {
r, cleanup := setup(t)
defer cleanup()
firstRange := Range{Line: 1, Character: 2, RefId: "3"}
require.Equal(t, &firstRange, r.Entries["1"])
secondRange := Range{Line: 5, Character: 4, RefId: "3"}
require.Equal(t, &secondRange, r.Entries["2"])
}
func TestSerialize(t *testing.T) {
r, cleanup := setup(t)
defer cleanup()
docs := map[string]string{"6": "def-path"}
var buf bytes.Buffer
err := r.Serialize(&buf, []string{"1"}, docs)
want := `[{"start_line":1,"start_char":2,"definition_path":"def-path#L2","hover":null}` + "\n]"
require.NoError(t, err)
require.Equal(t, want, buf.String())
}
func setup(t *testing.T) (*Ranges, func()) {
r, err := NewRanges("")
require.NoError(t, err)
require.NoError(t, r.Read("range", []byte(`{"id":"1","label":"range","start":{"line":1,"character":2}}`)))
require.NoError(t, r.Read("range", []byte(`{"id":"2","label":"range","start":{"line":5,"character":4}}`)))
require.NoError(t, r.Read("item", []byte(`{"id":"4","label":"item","property":"definitions","outV":"3","inVs":["1"],"document":"6"}`)))
require.NoError(t, r.Read("item", []byte(`{"id":"4","label":"item","property":"references","outV":"3","inVs":["2"]}`)))
cleanup := func() {
require.NoError(t, r.Close())
}
return r, cleanup
}
[
{
"start_line": 7,
"start_char": 1,
"definition_path": "main.go#L4",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kn\"\u003epackage\u003c/span\u003e \u003cspan class=\"s\"\u003e\u0026#34;github.com/user/hello/morestrings\u0026#34;\u003c/span\u003e\u003c/span\u003e",
"language": "go"
},
{
"value": "Package morestrings implements additional functions to manipulate UTF-8 encoded strings, beyond what is provided in the standard \"strings\" package. \n\n"
}
]
},
{
"start_line": 7,
"start_char": 13,
"definition_path": "morestrings/reverse.go#L12",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kd\"\u003efunc\u003c/span\u003e Reverse(s \u003cspan class=\"kt\"\u003estring\u003c/span\u003e) \u003cspan class=\"kt\"\u003estring\u003c/span\u003e\u003c/span\u003e",
"language": "go"
},
{
"value": "This method reverses a string \n\n"
}
]
},
{
"start_line": 8,
"start_char": 1,
"definition_path": "main.go#L4",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kn\"\u003epackage\u003c/span\u003e \u003cspan class=\"s\"\u003e\u0026#34;github.com/user/hello/morestrings\u0026#34;\u003c/span\u003e\u003c/span\u003e",
"language": "go"
},
{
"value": "Package morestrings implements additional functions to manipulate UTF-8 encoded strings, beyond what is provided in the standard \"strings\" package. \n\n"
}
]
},
{
"start_line": 8,
"start_char": 13,
"definition_path": "morestrings/reverse.go#L5",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kd\"\u003efunc\u003c/span\u003e Func2(i \u003cspan class=\"kt\"\u003eint\u003c/span\u003e) \u003cspan class=\"kt\"\u003estring\u003c/span\u003e\u003c/span\u003e",
"language": "go"
}
]
},
{
"start_line": 6,
"start_char": 5,
"definition_path": "main.go#L7",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kd\"\u003efunc\u003c/span\u003e main()\u003c/span\u003e",
"language": "go"
}
]
},
{
"start_line": 3,
"start_char": 2,
"definition_path": "main.go#L4",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kn\"\u003epackage\u003c/span\u003e \u003cspan class=\"s\"\u003e\u0026#34;github.com/user/hello/morestrings\u0026#34;\u003c/span\u003e\u003c/span\u003e",
"language": "go"
},
{
"value": "Package morestrings implements additional functions to manipulate UTF-8 encoded strings, beyond what is provided in the standard \"strings\" package. \n\n"
}
]
}
]
\ No newline at end of file
[
{
"start_line": 11,
"start_char": 5,
"definition_path": "morestrings/reverse.go#L12",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kd\"\u003efunc\u003c/span\u003e Reverse(s \u003cspan class=\"kt\"\u003estring\u003c/span\u003e) \u003cspan class=\"kt\"\u003estring\u003c/span\u003e\u003c/span\u003e",
"language": "go"
},
{
"value": "This method reverses a string \n\n"
}
]
},
{
"start_line": 4,
"start_char": 11,
"definition_path": "morestrings/reverse.go#L5",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kd\"\u003evar\u003c/span\u003e i \u003cspan class=\"kt\"\u003eint\u003c/span\u003e\u003c/span\u003e",
"language": "go"
}
]
},
{
"start_line": 11,
"start_char": 13,
"definition_path": "morestrings/reverse.go#L12",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kd\"\u003evar\u003c/span\u003e s \u003cspan class=\"kt\"\u003estring\u003c/span\u003e\u003c/span\u003e",
"language": "go"
}
]
},
{
"start_line": 12,
"start_char": 1,
"definition_path": "morestrings/reverse.go#L13",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kd\"\u003evar\u003c/span\u003e a \u003cspan class=\"kt\"\u003estring\u003c/span\u003e\u003c/span\u003e",
"language": "go"
}
]
},
{
"start_line": 5,
"start_char": 1,
"definition_path": "morestrings/reverse.go#L6",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kd\"\u003evar\u003c/span\u003e b \u003cspan class=\"kt\"\u003estring\u003c/span\u003e\u003c/span\u003e",
"language": "go"
}
]
},
{
"start_line": 14,
"start_char": 8,
"definition_path": "morestrings/reverse.go#L13",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kd\"\u003evar\u003c/span\u003e a \u003cspan class=\"kt\"\u003estring\u003c/span\u003e\u003c/span\u003e",
"language": "go"
}
]
},
{
"start_line": 7,
"start_char": 8,
"definition_path": "morestrings/reverse.go#L6",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kd\"\u003evar\u003c/span\u003e b \u003cspan class=\"kt\"\u003estring\u003c/span\u003e\u003c/span\u003e",
"language": "go"
}
]
},
{
"start_line": 4,
"start_char": 5,
"definition_path": "morestrings/reverse.go#L5",
"hover": [
{
"value": "\u003cspan class=\"line\" lang=\"go\"\u003e\u003cspan class=\"kd\"\u003efunc\u003c/span\u003e Func2(i \u003cspan class=\"kt\"\u003eint\u003c/span\u003e) \u003cspan class=\"kt\"\u003estring\u003c/span\u003e\u003c/span\u003e",
"language": "go"
}
]
}
]
\ No newline at end of file
@@ -14,6 +14,7 @@ import (
"gitlab.com/gitlab-org/gitlab-workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/filestore"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/lsif_transformer/parser"
"gitlab.com/gitlab-org/gitlab-workhorse/internal/upload/exif"
)
@@ -140,6 +141,11 @@ func (rew *rewriter) handleFilePart(ctx context.Context, name string, p *multipa
inputReader = p
}
inputReader, err := rew.handleLsifUpload(inputReader, opts.LocalTempPath)
if err != nil {
return err
}
fh, err := filestore.SaveFileFromReader(ctx, inputReader, -1, opts)
if err != nil {
switch err {
@@ -165,6 +171,28 @@ func (rew *rewriter) handleFilePart(ctx context.Context, name string, p *multipa
return rew.filter.ProcessFile(ctx, name, fh, rew.writer)
}
func (rew *rewriter) handleLsifUpload(reader io.Reader, tempPath string) (io.Reader, error) {
if rew.preauth.ProcessLsif {
p, err := parser.NewParser(reader, tempPath)
if err != nil {
return nil, err
}
z, err := p.ZipReader()
if err != nil {
return nil, err
}
if err := p.Close(); err != nil {
return nil, err
}
return z, nil
}
return reader, nil
}
func (rew *rewriter) copyPart(ctx context.Context, name string, p *multipart.Part) error {
np, err := rew.writer.CreatePart(p.Header)
if err != nil {
......