gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/

gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/.gitignore

gitlab-workhorse
testdata/data
testdata/scratch
testdata/public
gitlab-zip-cat
gitlab-zip-metadata

gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/.gitlab-ci.yml

before_script:
  - apt-get update -qq && apt-get install -y curl unzip bzip2
  - curl -O https://storage.googleapis.com/golang/go1.5.3.linux-amd64.tar.gz
  - echo '43afe0c5017e502630b1aea4d44b8a7f059bf60d7f29dfd58db454d4e4e0ae53  go1.5.3.linux-amd64.tar.gz' | shasum -c -a256 -
  - rm -rf /usr/local/go && tar -C /usr/local -xzf go1.5.3.linux-amd64.tar.gz
  - export PATH=/usr/local/go/bin:$PATH

test:
  script: make clean test

gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/CHANGELOG

# Changelog for gitlab-workhorse

Formerly known as 'gitlab-git-http-server'.

0.6.3

Add support for sending raw Git blobs via gitlab-workhorse.

0.6.2

We now fill in missing directory entries in archive zip metadata files; also some other minor changes.

0.6.1

Add support for generating zip artifacts metadata and serving single files from zip archives. Gitlab-workhorse now consists of multiple executables. We also fixed a routing bug introduced by the 0.6.0 refactor that broke relative URL support.

0.6.0

Overhauled the source code organization; no user-facing changes (intended). The application code is now split into Go 'packages' (modules). As of 0.6.0 gitlab-workhorse requires Go 1.5 or newer.

0.5.4

Fix /api/v3/projects routing bug introduced in 0.5.2-0.5.3.

0.5.3

Fixes merge error in 0.5.2.

0.5.2 (broken!)

- Always check with upstream if files in /uploads/ may be served
- Fix project%2Fnamespace API project ID's
- Prevent archive zombies when using gzip or bzip2
- Don't show pretty error pages in development mode

0.5.1

Deprecate -relativeURLRoot option, use -authBackend instead.

0.5.0

Send ALL GitLab requests through gitlab-workhorse.

0.4.2

Return response to client when uploading Git LFS object.

0.4.1

Add support for Build Artifacts and Git LFS. The GitLab-Workhorse offloads file uploading and downloading by providing support for rewriting multipart form data and X-Sendfile.

Other changes:
- add header Gitlab-Workhorse to all requests to indicate from where they originated

0.4.0

Rename the project to gitlab-workhorse. The old name had become too specific.

Other changes:
- pass LD_LIBRARY_PATH to Git commands
- accommodate broken HTTP clients by spelling 'Www-Authenticate' as 'WWW-Authenticate'

0.3.1

Add support for Unix domain socket connections to the authBackend.

0.3.0

In 0.3.0 we also handle 'git archive' downloads for GitLab 8.1+. This has led to some breaking API changes, making 0.3.0 incompatible with GitLab 8.0.

We now expect the 'auth backend' (GitLab) to provide us with much more information about each request, such as the path on disk to the Git repository the client is requesting. This makes the REPO_ROOT command line argument obsolete.

0.2.14

This is the last version that works with GitLab 8.0.
gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/LICENSE

The MIT License (MIT)

Copyright (c) 2015 GitLab B.V.

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/Makefile

PREFIX=/usr/local
VERSION=$(shell git describe)-$(shell date -u +%Y%m%d.%H%M%S)
GOBUILD=go build -ldflags "-X main.Version=${VERSION}"

all: gitlab-zip-cat gitlab-zip-metadata gitlab-workhorse

gitlab-zip-cat: $(shell find cmd/gitlab-zip-cat/ -name '*.go')
	${GOBUILD} -o $@ ./cmd/$@

gitlab-zip-metadata: $(shell find cmd/gitlab-zip-metadata/ -name '*.go')
	${GOBUILD} -o $@ ./cmd/$@

gitlab-workhorse: $(shell find . -name '*.go')
	${GOBUILD} -o $@

install: gitlab-workhorse gitlab-zip-cat gitlab-zip-metadata
	mkdir -p $(DESTDIR)${PREFIX}/bin/
	install gitlab-workhorse gitlab-zip-cat gitlab-zip-metadata ${DESTDIR}${PREFIX}/bin/

.PHONY: test
test: testdata/data/group/test.git clean-workhorse all
	go fmt ./... | awk '{ print } END { if (NR > 0) { print "Please run go fmt"; exit 1 } }'
	support/path go test ./...
	@echo SUCCESS

coverage: testdata/data/group/test.git
	go test -cover -coverprofile=test.coverage
	go tool cover -html=test.coverage -o coverage.html
	rm -f test.coverage

testdata/data/group/test.git: testdata/data
	git clone --bare https://gitlab.com/gitlab-org/gitlab-test.git $@

testdata/data:
	mkdir -p $@

.PHONY: clean
clean: clean-workhorse
	rm -rf testdata/data testdata/scratch

.PHONY: clean-workhorse
clean-workhorse:
	rm -f gitlab-workhorse gitlab-zip-cat gitlab-zip-metadata

gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/README.md

# gitlab-workhorse

Gitlab-workhorse is a smart reverse proxy for GitLab. It handles "large" HTTP requests such as file downloads, file uploads, Git push/pull and Git archive downloads.
## Usage

```
gitlab-workhorse [OPTIONS]

Options:
  -authBackend string
        Authentication/authorization backend (default "http://localhost:8080")
  -authSocket string
        Optional: Unix domain socket to dial authBackend at
  -developmentMode
        Allow to serve assets from Rails app
  -documentRoot string
        Path to static files content (default "public")
  -listenAddr string
        Listen address for HTTP server (default "localhost:8181")
  -listenNetwork string
        Listen 'network' (tcp, tcp4, tcp6, unix) (default "tcp")
  -listenUmask int
        Umask for Unix socket, default: 022 (default 18)
  -pprofListenAddr string
        pprof listening address, e.g. 'localhost:6060'
  -proxyHeadersTimeout duration
        How long to wait for response headers when proxying the request (default 1m0s)
  -version
        Print version and exit
```

The 'auth backend' refers to the GitLab Rails application. The name is a holdover from when gitlab-workhorse only handled Git push/pull over HTTP.

Gitlab-workhorse can listen on either a TCP or a Unix domain socket. It can also open a second TCP listening socket with the Go [net/http/pprof profiler server](http://golang.org/pkg/net/http/pprof/); see the example invocation at the end of this README.

### Relative URL support

If you are mounting GitLab at a relative URL, e.g. `example.com/gitlab`, then you should also use this relative URL in the `authBackend` setting:

```
gitlab-workhorse -authBackend http://localhost:8080/gitlab
```

## Installation

To install gitlab-workhorse you need [Go 1.5 or newer](https://golang.org/dl).

To install into `/usr/local/bin` run `make install`.

```
make install
```

To install into `/foo/bin` set the PREFIX variable.

```
make install PREFIX=/foo
```

## Tests

```
make clean test
```

## Try it out

You can try out the Git server without authentication as follows:

```
# Start a fake auth backend that allows everything/everybody
make test/data/test.git
go run support/fake-auth-backend.go ~+/test/data/test.git &

# Start gitlab-workhorse
make
./gitlab-workhorse
```

Now you can try things like:

```
git clone http://localhost:8181/test.git
curl -JO http://localhost:8181/test/repository/archive.zip
```

## Example request flow

- start POST repo.git/git-receive-pack to NGINX
- ..start POST repo.git/git-receive-pack to gitlab-workhorse
- ....start POST repo.git/git-receive-pack to Unicorn for auth
- ....end POST to Unicorn for auth
- ....start git-receive-pack process from gitlab-workhorse
- ......start POST /api/v3/internal/allowed to Unicorn from Git hook (check protected branches)
- ......end POST to Unicorn from Git hook
- ....end git-receive-pack process
- ..end POST to gitlab-workhorse
- end POST to NGINX

## License

This code is distributed under the MIT license, see the LICENSE file.
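For illustration only, here is a possible invocation that combines the Unix-socket and pprof options listed above. The socket path and backend URL are placeholders, not values taken from this repository.

```
# Hypothetical setup: serve HTTP on a Unix domain socket and expose the
# Go pprof profiler on a separate TCP port. Adjust paths and URLs to your
# own environment.
gitlab-workhorse \
  -listenNetwork unix \
  -listenAddr /var/run/gitlab-workhorse.socket \
  -authBackend http://localhost:8080 \
  -pprofListenAddr localhost:6060
```

gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/VERSION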
gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/VERSION000066400000000000000000000000061265414173000235710ustar00rootroot000000000000000.6.3 gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/authorization_test.go000066400000000000000000000026061265414173000270170ustar00rootroot00000000000000package main import ( "./internal/api" "./internal/helper" "./internal/testhelper" "fmt" "net/http" "net/http/httptest" "regexp" "testing" ) func okHandler(w http.ResponseWriter, _ *http.Request, _ *api.Response) { w.WriteHeader(201) fmt.Fprint(w, "{\"status\":\"ok\"}") } func runPreAuthorizeHandler(t *testing.T, suffix string, url *regexp.Regexp, apiResponse interface{}, returnCode, expectedCode int) *httptest.ResponseRecorder { // Prepare test server and backend ts := testAuthServer(url, returnCode, apiResponse) defer ts.Close() // Create http request httpRequest, err := http.NewRequest("GET", "/address", nil) if err != nil { t.Fatal(err) } a := api.NewAPI(helper.URLMustParse(ts.URL), "123", nil) response := httptest.NewRecorder() a.PreAuthorizeHandler(okHandler, suffix).ServeHTTP(response, httpRequest) testhelper.AssertResponseCode(t, response, expectedCode) return response } func TestPreAuthorizeHappyPath(t *testing.T) { runPreAuthorizeHandler( t, "/authorize", regexp.MustCompile(`/authorize\z`), &api.Response{}, 200, 201) } func TestPreAuthorizeSuffix(t *testing.T) { runPreAuthorizeHandler( t, "/different-authorize", regexp.MustCompile(`/authorize\z`), &api.Response{}, 200, 404) } func TestPreAuthorizeJsonFailure(t *testing.T) { runPreAuthorizeHandler( t, "/authorize", regexp.MustCompile(`/authorize\z`), "not-json", 200, 500) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/cmd/000077500000000000000000000000001265414173000232705ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/cmd/gitlab-zip-cat/000077500000000000000000000000001265414173000260775ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/cmd/gitlab-zip-cat/main.go000066400000000000000000000034561265414173000273620ustar00rootroot00000000000000package main import ( "../../internal/zipartifacts" "archive/zip" "flag" "fmt" "io" "os" ) const progName = "gitlab-zip-cat" var Version = "unknown" var printVersion = flag.Bool("version", false, "Print version and exit") func main() { flag.Parse() version := fmt.Sprintf("%s %s", progName, Version) if *printVersion { fmt.Println(version) os.Exit(0) } if len(os.Args) != 3 { fmt.Fprintf(os.Stderr, "Usage: %s FILE.ZIP ENTRY", progName) os.Exit(1) } archiveFileName := os.Args[1] fileName, err := zipartifacts.DecodeFileEntry(os.Args[2]) if err != nil { fatalError(fmt.Errorf("decode entry %q: %v", os.Args[2], err)) } archive, err := zip.OpenReader(archiveFileName) if err != nil { notFoundError(fmt.Errorf("open %q: %v", archiveFileName, err)) } defer archive.Close() file := findFileInZip(fileName, &archive.Reader) if file == nil { notFoundError(fmt.Errorf("find %q in %q: not found", fileName, archiveFileName)) } // Start decompressing the file reader, err := file.Open() if err != nil { fatalError(fmt.Errorf("open %q in %q: %v", fileName, archiveFileName, err)) } defer reader.Close() if _, err := fmt.Printf("%d\n", file.UncompressedSize64); err != nil { fatalError(fmt.Errorf("write file size: %v", err)) } if _, err := io.Copy(os.Stdout, reader); err != nil { fatalError(fmt.Errorf("write %q from %q to stdout: %v", fileName, archiveFileName, err)) } } func findFileInZip(fileName 
string, archive *zip.Reader) *zip.File { for _, file := range archive.File { if file.Name == fileName { return file } } return nil } func printError(err error) { fmt.Fprintf(os.Stderr, "%s: %v", progName, err) } func fatalError(err error) { printError(err) os.Exit(1) } func notFoundError(err error) { printError(err) os.Exit(zipartifacts.StatusEntryNotFound) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/cmd/gitlab-zip-metadata/000077500000000000000000000000001265414173000271105ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/cmd/gitlab-zip-metadata/main.go000066400000000000000000000012561265414173000303670ustar00rootroot00000000000000package main import ( "../../internal/zipartifacts" "flag" "fmt" "os" ) const progName = "gitlab-zip-metadata" var Version = "unknown" var printVersion = flag.Bool("version", false, "Print version and exit") func main() { flag.Parse() version := fmt.Sprintf("%s %s", progName, Version) if *printVersion { fmt.Println(version) os.Exit(0) } if len(os.Args) != 2 { fmt.Fprintf(os.Stderr, "Usage: %s FILE.ZIP\n", progName) os.Exit(1) } if err := zipartifacts.GenerateZipMetadataFromFile(os.Args[1], os.Stdout); err != nil { fmt.Fprintf(os.Stderr, "%s: %v\n", progName, err) if err == os.ErrInvalid { os.Exit(zipartifacts.StatusNotZip) } os.Exit(1) } } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/000077500000000000000000000000001265414173000243415ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/api/000077500000000000000000000000001265414173000251125ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/api/api.go000066400000000000000000000135121265414173000262140ustar00rootroot00000000000000package api import ( "../badgateway" "../helper" "encoding/json" "fmt" "io" "io/ioutil" "net/http" "net/url" "strings" ) type API struct { Client *http.Client URL *url.URL Version string } func NewAPI(myURL *url.URL, version string, roundTripper *badgateway.RoundTripper) *API { if roundTripper == nil { roundTripper = badgateway.NewRoundTripper("", 0) } return &API{ Client: &http.Client{Transport: roundTripper}, URL: myURL, Version: version, } } type HandleFunc func(http.ResponseWriter, *http.Request, *Response) type Response struct { // GL_ID is an environment variable used by gitlab-shell hooks during 'git // push' and 'git pull' GL_ID string // RepoPath is the full path on disk to the Git repository the request is // about RepoPath string // ArchivePath is the full path where we should find/create a cached copy // of a requested archive ArchivePath string // ArchivePrefix is used to put extracted archive contents in a // subdirectory ArchivePrefix string // CommitId is used do prevent race conditions between the 'time of check' // in the GitLab Rails app and the 'time of use' in gitlab-workhorse. 
CommitId string // StoreLFSPath is provided by the GitLab Rails application // to mark where the tmp file should be placed StoreLFSPath string // LFS object id LfsOid string // LFS object size LfsSize int64 // TmpPath is the path where we should store temporary files // This is set by authorization middleware TempPath string // Archive is the path where the artifacts archive is stored Archive string `json:"archive"` // Entry is a filename inside the archive point to file that needs to be extracted Entry string `json:"entry"` } // singleJoiningSlash is taken from reverseproxy.go:NewSingleHostReverseProxy func singleJoiningSlash(a, b string) string { aslash := strings.HasSuffix(a, "/") bslash := strings.HasPrefix(b, "/") switch { case aslash && bslash: return a + b[1:] case !aslash && !bslash: return a + "/" + b } return a + b } // rebaseUrl is taken from reverseproxy.go:NewSingleHostReverseProxy func rebaseUrl(url *url.URL, onto *url.URL, suffix string) *url.URL { newUrl := *url newUrl.Scheme = onto.Scheme newUrl.Host = onto.Host if suffix != "" { newUrl.Path = singleJoiningSlash(url.Path, suffix) } if onto.RawQuery == "" || newUrl.RawQuery == "" { newUrl.RawQuery = onto.RawQuery + newUrl.RawQuery } else { newUrl.RawQuery = onto.RawQuery + "&" + newUrl.RawQuery } return &newUrl } func (api *API) newRequest(r *http.Request, body io.Reader, suffix string) (*http.Request, error) { authReq := &http.Request{ Method: r.Method, URL: rebaseUrl(r.URL, api.URL, suffix), Header: helper.HeaderClone(r.Header), } if body != nil { authReq.Body = ioutil.NopCloser(body) } // Clean some headers when issuing a new request without body if body == nil { authReq.Header.Del("Content-Type") authReq.Header.Del("Content-Encoding") authReq.Header.Del("Content-Length") authReq.Header.Del("Content-Disposition") authReq.Header.Del("Accept-Encoding") // Hop-by-hop headers. These are removed when sent to the backend. // http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html authReq.Header.Del("Transfer-Encoding") authReq.Header.Del("Connection") authReq.Header.Del("Keep-Alive") authReq.Header.Del("Proxy-Authenticate") authReq.Header.Del("Proxy-Authorization") authReq.Header.Del("Te") authReq.Header.Del("Trailers") authReq.Header.Del("Upgrade") } // Also forward the Host header, which is excluded from the Header map by the http libary. // This allows the Host header received by the backend to be consistent with other // requests not going through gitlab-workhorse. authReq.Host = r.Host // Set a custom header for the request. This can be used in some // configurations (Passenger) to solve auth request routing problems. authReq.Header.Set("Gitlab-Workhorse", api.Version) return authReq, nil } func (api *API) PreAuthorizeHandler(h HandleFunc, suffix string) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { authReq, err := api.newRequest(r, nil, suffix) if err != nil { helper.Fail500(w, fmt.Errorf("preAuthorizeHandler: newUpstreamRequest: %v", err)) return } authResponse, err := api.Client.Do(authReq) if err != nil { helper.Fail500(w, fmt.Errorf("preAuthorizeHandler: do %v: %v", authReq.URL.Path, err)) return } defer authResponse.Body.Close() if authResponse.StatusCode != 200 { // The Git request is not allowed by the backend. Maybe the // client needs to send HTTP Basic credentials. Forward the // response from the auth backend to our client. This includes // the 'WWW-Authenticate' header that acts as a hint that // Basic auth credentials are needed. 
for k, v := range authResponse.Header { // Accomodate broken clients that do case-sensitive header lookup if k == "Www-Authenticate" { w.Header()["WWW-Authenticate"] = v } else { w.Header()[k] = v } } w.WriteHeader(authResponse.StatusCode) io.Copy(w, authResponse.Body) return } a := &Response{} // The auth backend validated the client request and told us additional // request metadata. We must extract this information from the auth // response body. if err := json.NewDecoder(authResponse.Body).Decode(a); err != nil { helper.Fail500(w, fmt.Errorf("preAuthorizeHandler: decode authorization response: %v", err)) return } // Don't hog a TCP connection in CLOSE_WAIT, we can already close it now authResponse.Body.Close() // Negotiate authentication (Kerberos) may need to return a WWW-Authenticate // header to the client even in case of success as per RFC4559. for k, v := range authResponse.Header { // Case-insensitive comparison as per RFC7230 if strings.EqualFold(k, "WWW-Authenticate") { w.Header()[k] = v } } h(w, r, a) }) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/artifacts/000077500000000000000000000000001265414173000263215ustar00rootroot00000000000000artifact_download.go000066400000000000000000000051161265414173000322600ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/artifactspackage artifacts import ( "../api" "../helper" "../zipartifacts" "bufio" "errors" "fmt" "io" "mime" "net/http" "os" "os/exec" "path/filepath" "strings" "syscall" ) func detectFileContentType(fileName string) string { contentType := mime.TypeByExtension(filepath.Ext(fileName)) if contentType == "" { contentType = "application/octet-stream" } return contentType } func unpackFileFromZip(archiveFileName, encodedFilename string, headers http.Header, output io.Writer) error { fileName, err := zipartifacts.DecodeFileEntry(encodedFilename) if err != nil { return err } catFile := exec.Command("gitlab-zip-cat", archiveFileName, encodedFilename) catFile.Stderr = os.Stderr catFile.SysProcAttr = &syscall.SysProcAttr{Setpgid: true} stdout, err := catFile.StdoutPipe() if err != nil { return fmt.Errorf("create gitlab-zip-cat stdout pipe: %v", err) } if err := catFile.Start(); err != nil { return fmt.Errorf("start %v: %v", catFile.Args, err) } defer helper.CleanUpProcessGroup(catFile) basename := filepath.Base(fileName) reader := bufio.NewReader(stdout) contentLength, err := reader.ReadString('\n') if err != nil { if catFileErr := waitCatFile(catFile); catFileErr != nil { return catFileErr } return fmt.Errorf("read content-length: %v", err) } contentLength = strings.TrimSuffix(contentLength, "\n") // Write http headers about the file headers.Set("Content-Length", contentLength) headers.Set("Content-Type", detectFileContentType(fileName)) headers.Set("Content-Disposition", "attachment; filename=\""+escapeQuotes(basename)+"\"") // Copy file body to client if _, err := io.Copy(output, reader); err != nil { return fmt.Errorf("copy stdout of %v: %v", catFile.Args, err) } return waitCatFile(catFile) } func waitCatFile(cmd *exec.Cmd) error { err := cmd.Wait() if err == nil { return nil } if st, ok := helper.ExitStatus(err); ok && st == zipartifacts.StatusEntryNotFound { return os.ErrNotExist } return fmt.Errorf("wait for %v to finish: %v", cmd.Args, err) } // Artifacts downloader doesn't support ranges when downloading a single file func DownloadArtifact(myAPI *api.API) http.Handler { return myAPI.PreAuthorizeHandler(func(w http.ResponseWriter, r 
*http.Request, a *api.Response) { if a.Archive == "" || a.Entry == "" { helper.Fail500(w, errors.New("DownloadArtifact: Archive or Path is empty")) return } err := unpackFileFromZip(a.Archive, a.Entry, w.Header(), w) if os.IsNotExist(err) { http.NotFound(w, r) return } else if err != nil { helper.Fail500(w, fmt.Errorf("DownloadArtifact: %v", err)) } }, "") } artifact_download_test.go000066400000000000000000000054641265414173000333250ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/artifactspackage artifacts import ( "../api" "../helper" "../testhelper" "archive/zip" "encoding/base64" "encoding/json" "fmt" "io/ioutil" "net/http" "net/http/httptest" "os" "testing" ) func testArtifactDownloadServer(t *testing.T, archive string, entry string) *httptest.Server { mux := http.NewServeMux() mux.HandleFunc("/url/path", func(w http.ResponseWriter, r *http.Request) { if r.Method != "GET" { t.Fatal("Expected GET request") } w.Header().Set("Content-Type", "application/json") data, err := json.Marshal(&api.Response{ Archive: archive, Entry: base64.StdEncoding.EncodeToString([]byte(entry)), }) if err != nil { t.Fatal(err) } w.Write(data) }) return testhelper.TestServerWithHandler(nil, mux.ServeHTTP) } func testDownloadArtifact(t *testing.T, ts *httptest.Server) *httptest.ResponseRecorder { httpRequest, err := http.NewRequest("GET", ts.URL+"/url/path", nil) if err != nil { t.Fatal(err) } response := httptest.NewRecorder() apiClient := api.NewAPI(helper.URLMustParse(ts.URL), "123", nil) DownloadArtifact(apiClient).ServeHTTP(response, httpRequest) return response } func TestDownloadingFromValidArchive(t *testing.T) { tempFile, err := ioutil.TempFile("", "uploads") if err != nil { t.Fatal(err) } defer tempFile.Close() defer os.Remove(tempFile.Name()) archive := zip.NewWriter(tempFile) defer archive.Close() fileInArchive, err := archive.Create("test.txt") if err != nil { t.Fatal(err) } fmt.Fprint(fileInArchive, "testtest") archive.Close() ts := testArtifactDownloadServer(t, tempFile.Name(), "test.txt") defer ts.Close() response := testDownloadArtifact(t, ts) testhelper.AssertResponseCode(t, response, 200) testhelper.AssertResponseHeader(t, response, "Content-Type", "text/plain; charset=utf-8") testhelper.AssertResponseHeader(t, response, "Content-Disposition", "attachment; filename=\"test.txt\"") testhelper.AssertResponseBody(t, response, "testtest") } func TestDownloadingNonExistingFile(t *testing.T) { tempFile, err := ioutil.TempFile("", "uploads") if err != nil { t.Fatal(err) } defer tempFile.Close() defer os.Remove(tempFile.Name()) archive := zip.NewWriter(tempFile) defer archive.Close() archive.Close() ts := testArtifactDownloadServer(t, tempFile.Name(), "test") defer ts.Close() response := testDownloadArtifact(t, ts) testhelper.AssertResponseCode(t, response, 404) } func TestDownloadingFromInvalidArchive(t *testing.T) { ts := testArtifactDownloadServer(t, "path/to/non/existing/file", "test") defer ts.Close() response := testDownloadArtifact(t, ts) testhelper.AssertResponseCode(t, response, 404) } func TestIncompleteApiResponse(t *testing.T) { ts := testArtifactDownloadServer(t, "", "") defer ts.Close() response := testDownloadArtifact(t, ts) testhelper.AssertResponseCode(t, response, 500) } artifacts_upload.go000066400000000000000000000040601265414173000321150ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/artifactspackage artifacts import ( "../api" "../helper" "../upload" "../zipartifacts" 
"errors" "fmt" "io/ioutil" "mime/multipart" "net/http" "os" "os/exec" "syscall" ) type artifactsUploadProcessor struct { TempPath string metadataFile string } func (a *artifactsUploadProcessor) ProcessFile(formName, fileName string, writer *multipart.Writer) error { // ProcessFile for artifacts requires file form-data field name to eq `file` if formName != "file" { return fmt.Errorf("Invalid form field: %q", formName) } if a.metadataFile != "" { return fmt.Errorf("Artifacts request contains more than one file!") } // Create temporary file for metadata and store it's path tempFile, err := ioutil.TempFile(a.TempPath, "metadata_") if err != nil { return err } defer tempFile.Close() a.metadataFile = tempFile.Name() // Generate metadata and save to file zipMd := exec.Command("gitlab-zip-metadata", fileName) zipMd.Stderr = os.Stderr zipMd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true} zipMd.Stdout = tempFile if err := zipMd.Start(); err != nil { return err } defer helper.CleanUpProcessGroup(zipMd) if err := zipMd.Wait(); err != nil { if st, ok := helper.ExitStatus(err); ok && st == zipartifacts.StatusNotZip { return nil } return err } // Pass metadata file path to Rails writer.WriteField("metadata.path", a.metadataFile) writer.WriteField("metadata.name", "metadata.gz") return nil } func (a *artifactsUploadProcessor) ProcessField(formName string, writer *multipart.Writer) error { return nil } func (a *artifactsUploadProcessor) Cleanup() { if a.metadataFile != "" { os.Remove(a.metadataFile) } } func UploadArtifacts(myAPI *api.API, h http.Handler) http.Handler { return myAPI.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) { if a.TempPath == "" { helper.Fail500(w, errors.New("UploadArtifacts: TempPath is empty")) return } mg := &artifactsUploadProcessor{TempPath: a.TempPath} defer mg.Cleanup() upload.HandleFileUploads(w, r, h, a.TempPath, mg) }, "/authorize") } artifacts_upload_test.go000066400000000000000000000101151265414173000331520ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/artifactspackage artifacts import ( "../api" "../helper" "../proxy" "../testhelper" "../zipartifacts" "archive/zip" "bytes" "compress/gzip" "encoding/json" "fmt" "io" "io/ioutil" "mime/multipart" "net/http" "net/http/httptest" "os" "testing" ) func testArtifactsUploadServer(t *testing.T, tempPath string) *httptest.Server { mux := http.NewServeMux() mux.HandleFunc("/url/path/authorize", func(w http.ResponseWriter, r *http.Request) { if r.Method != "POST" { t.Fatal("Expected POST request") } w.Header().Set("Content-Type", "application/json") data, err := json.Marshal(&api.Response{ TempPath: tempPath, }) if err != nil { t.Fatal("Expected to marshal") } w.Write(data) }) mux.HandleFunc("/url/path", func(w http.ResponseWriter, r *http.Request) { if r.Method != "POST" { t.Fatal("Expected POST request") } if r.FormValue("file.path") == "" { w.WriteHeader(501) return } if r.FormValue("metadata.path") == "" { w.WriteHeader(502) return } _, err := ioutil.ReadFile(r.FormValue("file.path")) if err != nil { w.WriteHeader(404) return } metadata, err := ioutil.ReadFile(r.FormValue("metadata.path")) if err != nil { w.WriteHeader(404) return } gz, err := gzip.NewReader(bytes.NewReader(metadata)) if err != nil { w.WriteHeader(405) return } defer gz.Close() metadata, err = ioutil.ReadAll(gz) if err != nil { w.WriteHeader(404) return } if !bytes.HasPrefix(metadata, []byte(zipartifacts.MetadataHeaderPrefix+zipartifacts.MetadataHeader)) { 
w.WriteHeader(400) return } w.WriteHeader(200) }) return testhelper.TestServerWithHandler(nil, mux.ServeHTTP) } func testUploadArtifacts(contentType string, body io.Reader, t *testing.T, ts *httptest.Server) *httptest.ResponseRecorder { httpRequest, err := http.NewRequest("POST", ts.URL+"/url/path", body) if err != nil { t.Fatal(err) } httpRequest.Header.Set("Content-Type", contentType) response := httptest.NewRecorder() apiClient := api.NewAPI(helper.URLMustParse(ts.URL), "123", nil) proxyClient := proxy.NewProxy(helper.URLMustParse(ts.URL), "123", nil) UploadArtifacts(apiClient, proxyClient).ServeHTTP(response, httpRequest) return response } func TestUploadHandlerAddingMetadata(t *testing.T) { tempPath, err := ioutil.TempDir("", "uploads") if err != nil { t.Fatal(err) } defer os.RemoveAll(tempPath) ts := testArtifactsUploadServer(t, tempPath) defer ts.Close() var buffer bytes.Buffer writer := multipart.NewWriter(&buffer) file, err := writer.CreateFormFile("file", "my.file") if err != nil { t.Fatal(err) } archive := zip.NewWriter(file) defer archive.Close() fileInArchive, err := archive.Create("test.file") if err != nil { t.Fatal(err) } fmt.Fprint(fileInArchive, "test") archive.Close() writer.Close() response := testUploadArtifacts(writer.FormDataContentType(), &buffer, t, ts) testhelper.AssertResponseCode(t, response, 200) } func TestUploadHandlerForUnsupportedArchive(t *testing.T) { tempPath, err := ioutil.TempDir("", "uploads") if err != nil { t.Fatal(err) } defer os.RemoveAll(tempPath) ts := testArtifactsUploadServer(t, tempPath) defer ts.Close() var buffer bytes.Buffer writer := multipart.NewWriter(&buffer) file, err := writer.CreateFormFile("file", "my.file") if err != nil { t.Fatal(err) } fmt.Fprint(file, "test") writer.Close() response := testUploadArtifacts(writer.FormDataContentType(), &buffer, t, ts) // 502 is a custom response code from the mock server in testUploadArtifacts testhelper.AssertResponseCode(t, response, 502) } func TestUploadFormProcessing(t *testing.T) { tempPath, err := ioutil.TempDir("", "uploads") if err != nil { t.Fatal(err) } defer os.RemoveAll(tempPath) ts := testArtifactsUploadServer(t, tempPath) defer ts.Close() var buffer bytes.Buffer writer := multipart.NewWriter(&buffer) file, err := writer.CreateFormFile("metadata", "my.file") if err != nil { t.Fatal(err) } fmt.Fprint(file, "test") writer.Close() response := testUploadArtifacts(writer.FormDataContentType(), &buffer, t, ts) testhelper.AssertResponseCode(t, response, 500) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/artifacts/escape_quotes.go000066400000000000000000000003261265414173000315110ustar00rootroot00000000000000package artifacts import "strings" // taken from mime/multipart/writer.go var quoteEscaper = strings.NewReplacer("\\", "\\\\", `"`, "\\\"") func escapeQuotes(s string) string { return quoteEscaper.Replace(s) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/badgateway/000077500000000000000000000000001265414173000264515ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/badgateway/roundtripper.go000066400000000000000000000035611265414173000315420ustar00rootroot00000000000000package badgateway import ( "../helper" "bytes" "fmt" "io/ioutil" "net" "net/http" "time" ) // Values from http.DefaultTransport var DefaultDialer = &net.Dialer{ Timeout: 30 * time.Second, KeepAlive: 30 * time.Second, } var DefaultTransport = &http.Transport{ Proxy: http.ProxyFromEnvironment, // from 
http.DefaultTransport Dial: DefaultDialer.Dial, // from http.DefaultTransport TLSHandshakeTimeout: 10 * time.Second, // from http.DefaultTransport } type RoundTripper struct { Transport *http.Transport } func NewRoundTripper(socket string, proxyHeadersTimeout time.Duration) *RoundTripper { tr := *DefaultTransport tr.ResponseHeaderTimeout = proxyHeadersTimeout if socket != "" { tr.Dial = func(_, _ string) (net.Conn, error) { return DefaultDialer.Dial("unix", socket) } } return &RoundTripper{Transport: &tr} } func (t *RoundTripper) RoundTrip(r *http.Request) (res *http.Response, err error) { res, err = t.Transport.RoundTrip(r) // httputil.ReverseProxy translates all errors from this // RoundTrip function into 500 errors. But the most likely error // is that the Rails app is not responding, in which case users // and administrators expect to see a 502 error. To show 502s // instead of 500s we catch the RoundTrip error here and inject a // 502 response. if err != nil { helper.LogError(fmt.Errorf("proxyRoundTripper: %s %q failed with: %q", r.Method, r.RequestURI, err)) res = &http.Response{ StatusCode: http.StatusBadGateway, Status: http.StatusText(http.StatusBadGateway), Request: r, ProtoMajor: r.ProtoMajor, ProtoMinor: r.ProtoMinor, Proto: r.Proto, Header: make(http.Header), Trailer: make(http.Header), Body: ioutil.NopCloser(bytes.NewBufferString(err.Error())), } res.Header.Set("Content-Type", "text/plain") err = nil } return } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/git/000077500000000000000000000000001265414173000251245ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/git/archive.go000066400000000000000000000115741265414173000271040ustar00rootroot00000000000000/* In this file we handle 'git archive' downloads */ package git import ( "../api" "../helper" "fmt" "io" "io/ioutil" "log" "net/http" "os" "os/exec" "path" "path/filepath" "syscall" "time" ) func GetArchive(a *api.API) http.Handler { return repoPreAuthorizeHandler(a, handleGetArchive) } func handleGetArchive(w http.ResponseWriter, r *http.Request, a *api.Response) { var format string urlPath := r.URL.Path switch filepath.Base(urlPath) { case "archive.zip": format = "zip" case "archive.tar": format = "tar" case "archive", "archive.tar.gz": format = "tar.gz" case "archive.tar.bz2": format = "tar.bz2" default: helper.Fail500(w, fmt.Errorf("handleGetArchive: invalid format: %s", urlPath)) return } archiveFilename := path.Base(a.ArchivePath) if cachedArchive, err := os.Open(a.ArchivePath); err == nil { defer cachedArchive.Close() log.Printf("Serving cached file %q", a.ArchivePath) setArchiveHeaders(w, format, archiveFilename) // Even if somebody deleted the cachedArchive from disk since we opened // the file, Unix file semantics guarantee we can still read from the // open file in this process. http.ServeContent(w, r, "", time.Unix(0, 0), cachedArchive) return } // We assume the tempFile has a unique name so that concurrent requests are // safe. We create the tempfile in the same directory as the final cached // archive we want to create so that we can use an atomic link(2) operation // to finalize the cached archive. 
tempFile, err := prepareArchiveTempfile(path.Dir(a.ArchivePath), archiveFilename) if err != nil { helper.Fail500(w, fmt.Errorf("handleGetArchive: create tempfile: %v", err)) return } defer tempFile.Close() defer os.Remove(tempFile.Name()) compressCmd, archiveFormat := parseArchiveFormat(format) archiveCmd := gitCommand("", "git", "--git-dir="+a.RepoPath, "archive", "--format="+archiveFormat, "--prefix="+a.ArchivePrefix+"/", a.CommitId) archiveStdout, err := archiveCmd.StdoutPipe() if err != nil { helper.Fail500(w, fmt.Errorf("handleGetArchive: archive stdout: %v", err)) return } defer archiveStdout.Close() if err := archiveCmd.Start(); err != nil { helper.Fail500(w, fmt.Errorf("handleGetArchive: start %v: %v", archiveCmd.Args, err)) return } defer helper.CleanUpProcessGroup(archiveCmd) // Ensure brute force subprocess clean-up var stdout io.ReadCloser if compressCmd == nil { stdout = archiveStdout } else { compressCmd.Stdin = archiveStdout compressCmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true} stdout, err = compressCmd.StdoutPipe() if err != nil { helper.Fail500(w, fmt.Errorf("handleGetArchive: compress stdout: %v", err)) return } defer stdout.Close() if err := compressCmd.Start(); err != nil { helper.Fail500(w, fmt.Errorf("handleGetArchive: start %v: %v", compressCmd.Args, err)) return } defer helper.CleanUpProcessGroup(compressCmd) archiveStdout.Close() } // Every Read() from stdout will be synchronously written to tempFile // before it comes out the TeeReader. archiveReader := io.TeeReader(stdout, tempFile) // Start writing the response setArchiveHeaders(w, format, archiveFilename) w.WriteHeader(200) // Don't bother with HTTP 500 from this point on, just return if _, err := io.Copy(w, archiveReader); err != nil { helper.LogError(fmt.Errorf("handleGetArchive: copy 'git archive' output: %v", err)) return } if err := archiveCmd.Wait(); err != nil { helper.LogError(fmt.Errorf("handleGetArchive: archiveCmd: %v", err)) return } if compressCmd != nil { if err := compressCmd.Wait(); err != nil { helper.LogError(fmt.Errorf("handleGetArchive: compressCmd: %v", err)) return } } if err := finalizeCachedArchive(tempFile, a.ArchivePath); err != nil { helper.LogError(fmt.Errorf("handleGetArchive: finalize cached archive: %v", err)) return } } func setArchiveHeaders(w http.ResponseWriter, format string, archiveFilename string) { w.Header().Add("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, archiveFilename)) if format == "zip" { w.Header().Add("Content-Type", "application/zip") } else { w.Header().Add("Content-Type", "application/octet-stream") } w.Header().Add("Content-Transfer-Encoding", "binary") w.Header().Add("Cache-Control", "private") } func parseArchiveFormat(format string) (*exec.Cmd, string) { switch format { case "tar": return nil, "tar" case "tar.gz": return exec.Command("gzip", "-c", "-n"), "tar" case "tar.bz2": return exec.Command("bzip2", "-c"), "tar" case "zip": return nil, "zip" } return nil, "unknown" } func prepareArchiveTempfile(dir string, prefix string) (*os.File, error) { if err := os.MkdirAll(dir, 0700); err != nil { return nil, err } return ioutil.TempFile(dir, prefix) } func finalizeCachedArchive(tempFile *os.File, archivePath string) error { if err := tempFile.Close(); err != nil { return err } return os.Link(tempFile.Name(), archivePath) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/git/blob.go000066400000000000000000000027501265414173000263750ustar00rootroot00000000000000package git import ( "../helper" "encoding/base64" 
"encoding/json" "fmt" "io" "log" "net/http" "strings" ) type blobParams struct { RepoPath string BlobId string } const SendBlobPrefix = "git-blob:" func SendBlob(w http.ResponseWriter, r *http.Request, sendData string) { params, err := unpackSendData(sendData) if err != nil { helper.Fail500(w, fmt.Errorf("SendBlob: unpack sendData: %v", err)) return } log.Printf("SendBlob: sending %q for %q", params.BlobId, r.URL.Path) gitShowCmd := gitCommand("", "git", "--git-dir="+params.RepoPath, "cat-file", "blob", params.BlobId) stdout, err := gitShowCmd.StdoutPipe() if err != nil { helper.Fail500(w, fmt.Errorf("SendBlob: git stdout: %v", err)) return } if err := gitShowCmd.Start(); err != nil { helper.Fail500(w, fmt.Errorf("SendBlob: start %v: %v", gitShowCmd, err)) return } defer helper.CleanUpProcessGroup(gitShowCmd) if _, err := io.Copy(w, stdout); err != nil { helper.LogError(fmt.Errorf("SendBlob: copy git cat-file stdout: %v", err)) return } if err := gitShowCmd.Wait(); err != nil { helper.LogError(fmt.Errorf("SendBlob: wait for git cat-file: %v", err)) return } } func unpackSendData(sendData string) (*blobParams, error) { jsonBytes, err := base64.URLEncoding.DecodeString(strings.TrimPrefix(sendData, SendBlobPrefix)) if err != nil { return nil, err } result := &blobParams{} if err := json.Unmarshal([]byte(jsonBytes), result); err != nil { return nil, err } return result, nil } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/git/command.go000066400000000000000000000012671265414173000270770ustar00rootroot00000000000000package git import ( "fmt" "os" "os/exec" "syscall" ) // Git subprocess helpers func gitCommand(gl_id string, name string, args ...string) *exec.Cmd { cmd := exec.Command(name, args...) // Start the command in its own process group (nice for signalling) cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true} // Explicitly set the environment for the Git command cmd.Env = []string{ fmt.Sprintf("HOME=%s", os.Getenv("HOME")), fmt.Sprintf("PATH=%s", os.Getenv("PATH")), fmt.Sprintf("LD_LIBRARY_PATH=%s", os.Getenv("LD_LIBRARY_PATH")), fmt.Sprintf("GL_ID=%s", gl_id), } // If we don't do something with cmd.Stderr, Git errors will be lost cmd.Stderr = os.Stderr return cmd } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/git/git-http.go000066400000000000000000000114161265414173000272160ustar00rootroot00000000000000/* In this file we handle the Git 'smart HTTP' protocol */ package git import ( "../api" "../helper" "errors" "fmt" "io" "log" "net/http" "os" "path" "path/filepath" "strings" ) func GetInfoRefs(a *api.API) http.Handler { return repoPreAuthorizeHandler(a, handleGetInfoRefs) } func PostRPC(a *api.API) http.Handler { return repoPreAuthorizeHandler(a, handlePostRPC) } func looksLikeRepo(p string) bool { // If /path/to/foo.git/objects exists then let's assume it is a valid Git // repository. 
if _, err := os.Stat(path.Join(p, "objects")); err != nil { log.Print(err) return false } return true } func repoPreAuthorizeHandler(myAPI *api.API, handleFunc api.HandleFunc) http.Handler { return myAPI.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) { if a.RepoPath == "" { helper.Fail500(w, errors.New("repoPreAuthorizeHandler: RepoPath empty")) return } if !looksLikeRepo(a.RepoPath) { http.Error(w, "Not Found", 404) return } handleFunc(w, r, a) }, "") } func handleGetInfoRefs(w http.ResponseWriter, r *http.Request, a *api.Response) { rpc := r.URL.Query().Get("service") if !(rpc == "git-upload-pack" || rpc == "git-receive-pack") { // The 'dumb' Git HTTP protocol is not supported http.Error(w, "Not Found", 404) return } // Prepare our Git subprocess cmd := gitCommand(a.GL_ID, "git", subCommand(rpc), "--stateless-rpc", "--advertise-refs", a.RepoPath) stdout, err := cmd.StdoutPipe() if err != nil { helper.Fail500(w, fmt.Errorf("handleGetInfoRefs: stdout: %v", err)) return } defer stdout.Close() if err := cmd.Start(); err != nil { helper.Fail500(w, fmt.Errorf("handleGetInfoRefs: start %v: %v", cmd.Args, err)) return } defer helper.CleanUpProcessGroup(cmd) // Ensure brute force subprocess clean-up // Start writing the response w.Header().Add("Content-Type", fmt.Sprintf("application/x-%s-advertisement", rpc)) w.Header().Add("Cache-Control", "no-cache") w.WriteHeader(200) // Don't bother with HTTP 500 from this point on, just return if err := pktLine(w, fmt.Sprintf("# service=%s\n", rpc)); err != nil { helper.LogError(fmt.Errorf("handleGetInfoRefs: pktLine: %v", err)) return } if err := pktFlush(w); err != nil { helper.LogError(fmt.Errorf("handleGetInfoRefs: pktFlush: %v", err)) return } if _, err := io.Copy(w, stdout); err != nil { helper.LogError(fmt.Errorf("handleGetInfoRefs: copy output of %v: %v", cmd.Args, err)) return } if err := cmd.Wait(); err != nil { helper.LogError(fmt.Errorf("handleGetInfoRefs: wait for %v: %v", cmd.Args, err)) return } } func handlePostRPC(w http.ResponseWriter, r *http.Request, a *api.Response) { var err error // Get Git action from URL action := filepath.Base(r.URL.Path) if !(action == "git-upload-pack" || action == "git-receive-pack") { // The 'dumb' Git HTTP protocol is not supported helper.Fail500(w, fmt.Errorf("handlePostRPC: unsupported action: %s", r.URL.Path)) return } // Prepare our Git subprocess cmd := gitCommand(a.GL_ID, "git", subCommand(action), "--stateless-rpc", a.RepoPath) stdout, err := cmd.StdoutPipe() if err != nil { helper.Fail500(w, fmt.Errorf("handlePostRPC: stdout: %v", err)) return } defer stdout.Close() stdin, err := cmd.StdinPipe() if err != nil { helper.Fail500(w, fmt.Errorf("handlePostRPC: stdin: %v", err)) return } defer stdin.Close() if err := cmd.Start(); err != nil { helper.Fail500(w, fmt.Errorf("handlePostRPC: start %v: %v", cmd.Args, err)) return } defer helper.CleanUpProcessGroup(cmd) // Ensure brute force subprocess clean-up // Write the client request body to Git's standard input if _, err := io.Copy(stdin, r.Body); err != nil { helper.Fail500(w, fmt.Errorf("handlePostRPC write to %v: %v", cmd.Args, err)) return } // Signal to the Git subprocess that no more data is coming stdin.Close() // It may take a while before we return and the deferred closes happen // so let's free up some resources already. 
r.Body.Close() // Start writing the response w.Header().Add("Content-Type", fmt.Sprintf("application/x-%s-result", action)) w.Header().Add("Cache-Control", "no-cache") w.WriteHeader(200) // Don't bother with HTTP 500 from this point on, just return // This io.Copy may take a long time, both for Git push and pull. if _, err := io.Copy(w, stdout); err != nil { helper.LogError(fmt.Errorf("handlePostRPC copy output of %v: %v", cmd.Args, err)) return } if err := cmd.Wait(); err != nil { helper.LogError(fmt.Errorf("handlePostRPC wait for %v: %v", cmd.Args, err)) return } } func subCommand(rpc string) string { return strings.TrimPrefix(rpc, "git-") } func pktLine(w io.Writer, s string) error { _, err := fmt.Fprintf(w, "%04x%s", len(s)+4, s) return err } func pktFlush(w io.Writer) error { _, err := fmt.Fprint(w, "0000") return err } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/helper/000077500000000000000000000000001265414173000256205ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/helper/helpers.go000066400000000000000000000037071265414173000276200ustar00rootroot00000000000000package helper import ( "errors" "log" "net/http" "net/url" "os" "os/exec" "syscall" ) func Fail500(w http.ResponseWriter, err error) { http.Error(w, "Internal server error", 500) LogError(err) } func LogError(err error) { log.Printf("error: %v", err) } func SetNoCacheHeaders(header http.Header) { header.Set("Cache-Control", "no-cache, no-store, max-age=0, must-revalidate") header.Set("Pragma", "no-cache") header.Set("Expires", "Fri, 01 Jan 1990 00:00:00 GMT") } func OpenFile(path string) (file *os.File, fi os.FileInfo, err error) { file, err = os.Open(path) if err != nil { return } defer func() { if err != nil { file.Close() } }() fi, err = file.Stat() if err != nil { return } // The os.Open can also open directories if fi.IsDir() { err = &os.PathError{ Op: "open", Path: path, Err: errors.New("path is directory"), } return } return } func URLMustParse(s string) *url.URL { u, err := url.Parse(s) if err != nil { log.Fatalf("urlMustParse: %q %v", s, err) } return u } func HTTPError(w http.ResponseWriter, r *http.Request, error string, code int) { if r.ProtoAtLeast(1, 1) { // Force client to disconnect if we render request error w.Header().Set("Connection", "close") } http.Error(w, error, code) } func HeaderClone(h http.Header) http.Header { h2 := make(http.Header, len(h)) for k, vv := range h { vv2 := make([]string, len(vv)) copy(vv2, vv) h2[k] = vv2 } return h2 } func CleanUpProcessGroup(cmd *exec.Cmd) { if cmd == nil { return } process := cmd.Process if process != nil && process.Pid > 0 { // Send SIGTERM to the process group of cmd syscall.Kill(-process.Pid, syscall.SIGTERM) } // reap our child process cmd.Wait() } func ExitStatus(err error) (int, bool) { exitError, ok := err.(*exec.ExitError) if !ok { return 0, false } waitStatus, ok := exitError.Sys().(syscall.WaitStatus) if !ok { return 0, false } return waitStatus.ExitStatus(), true } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/helper/logging.go000066400000000000000000000020201265414173000275670ustar00rootroot00000000000000package helper import ( "fmt" "net/http" "time" ) type LoggingResponseWriter struct { rw http.ResponseWriter status int written int64 started time.Time } func NewLoggingResponseWriter(rw http.ResponseWriter) LoggingResponseWriter { return LoggingResponseWriter{ rw: rw, started: time.Now(), } } func (l *LoggingResponseWriter) Header() 
http.Header { return l.rw.Header() } func (l *LoggingResponseWriter) Write(data []byte) (n int, err error) { if l.status == 0 { l.WriteHeader(http.StatusOK) } n, err = l.rw.Write(data) l.written += int64(n) return } func (l *LoggingResponseWriter) WriteHeader(status int) { if l.status != 0 { return } l.status = status l.rw.WriteHeader(status) } func (l *LoggingResponseWriter) Log(r *http.Request) { duration := time.Since(l.started) fmt.Printf("%s %s - - [%s] %q %d %d %q %q %f\n", r.Host, r.RemoteAddr, l.started, fmt.Sprintf("%s %s %s", r.Method, r.RequestURI, r.Proto), l.status, l.written, r.Referer(), r.UserAgent(), duration.Seconds(), ) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/lfs/000077500000000000000000000000001265414173000251255ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/lfs/lfs.go000066400000000000000000000040731265414173000262440ustar00rootroot00000000000000/* In this file we handle git lfs objects downloads and uploads */ package lfs import ( "../api" "../helper" "../proxy" "bytes" "crypto/sha256" "encoding/hex" "errors" "fmt" "io" "io/ioutil" "net/http" "os" "path/filepath" ) func PutStore(a *api.API, p *proxy.Proxy) http.Handler { return lfsAuthorizeHandler(a, handleStoreLfsObject(p)) } func lfsAuthorizeHandler(myAPI *api.API, handleFunc api.HandleFunc) http.Handler { return myAPI.PreAuthorizeHandler(func(w http.ResponseWriter, r *http.Request, a *api.Response) { if a.StoreLFSPath == "" { helper.Fail500(w, errors.New("lfsAuthorizeHandler: StoreLFSPath empty")) return } if a.LfsOid == "" { helper.Fail500(w, errors.New("lfsAuthorizeHandler: LfsOid empty")) return } if err := os.MkdirAll(a.StoreLFSPath, 0700); err != nil { helper.Fail500(w, fmt.Errorf("lfsAuthorizeHandler: mkdir StoreLFSPath: %v", err)) return } handleFunc(w, r, a) }, "/authorize") } func handleStoreLfsObject(h http.Handler) api.HandleFunc { return func(w http.ResponseWriter, r *http.Request, a *api.Response) { file, err := ioutil.TempFile(a.StoreLFSPath, a.LfsOid) if err != nil { helper.Fail500(w, fmt.Errorf("handleStoreLfsObject: create tempfile: %v", err)) return } defer os.Remove(file.Name()) defer file.Close() hash := sha256.New() hw := io.MultiWriter(hash, file) written, err := io.Copy(hw, r.Body) if err != nil { helper.Fail500(w, fmt.Errorf("handleStoreLfsObject: copy body to tempfile: %v", err)) return } file.Close() if written != a.LfsSize { helper.Fail500(w, fmt.Errorf("handleStoreLfsObject: expected size %d, wrote %d", a.LfsSize, written)) return } shaStr := hex.EncodeToString(hash.Sum(nil)) if shaStr != a.LfsOid { helper.Fail500(w, fmt.Errorf("handleStoreLfsObject: expected sha256 %s, got %s", a.LfsOid, shaStr)) return } // Inject header and body r.Header.Set("X-GitLab-Lfs-Tmp", filepath.Base(file.Name())) r.Body = ioutil.NopCloser(&bytes.Buffer{}) r.ContentLength = 0 // And proxy the request h.ServeHTTP(w, r) } } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/proxy/000077500000000000000000000000001265414173000255225ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/proxy/proxy.go000066400000000000000000000016151265414173000272350ustar00rootroot00000000000000package proxy import ( "../badgateway" "../helper" "../senddata" "net/http" "net/http/httputil" "net/url" ) type Proxy struct { Version string reverseProxy *httputil.ReverseProxy } func NewProxy(myURL *url.URL, version string, roundTripper *badgateway.RoundTripper) *Proxy { p := 
Proxy{Version: version} u := *myURL // Make a copy of p.URL u.Path = "" p.reverseProxy = httputil.NewSingleHostReverseProxy(&u) if roundTripper != nil { p.reverseProxy.Transport = roundTripper } else { p.reverseProxy.Transport = badgateway.NewRoundTripper("", 0) } return &p } func (p *Proxy) ServeHTTP(w http.ResponseWriter, r *http.Request) { // Clone request req := *r req.Header = helper.HeaderClone(r.Header) // Set Workhorse version req.Header.Set("Gitlab-Workhorse", p.Version) rw := senddata.NewSendFileResponseWriter(w, &req) defer rw.Flush() p.reverseProxy.ServeHTTP(&rw, &req) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/senddata/000077500000000000000000000000001265414173000261245ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/senddata/sendfile.go000066400000000000000000000040651265414173000302510ustar00rootroot00000000000000/* The xSendFile middleware transparently sends static files in HTTP responses via the X-Sendfile mechanism. All that is needed in the Rails code is the 'send_file' method. */ package senddata import ( "../git" "../helper" "log" "net/http" "strings" ) const ( sendDataResponseHeader = "Gitlab-Workhorse-Send-Data" sendFileResponseHeader = "X-Sendfile" ) type sendFileResponseWriter struct { rw http.ResponseWriter status int hijacked bool req *http.Request } func NewSendFileResponseWriter(rw http.ResponseWriter, req *http.Request) sendFileResponseWriter { s := sendFileResponseWriter{ rw: rw, req: req, } // Advertise to upstream (Rails) that we support X-Sendfile req.Header.Set("X-Sendfile-Type", "X-Sendfile") return s } func (s *sendFileResponseWriter) Header() http.Header { return s.rw.Header() } func (s *sendFileResponseWriter) Write(data []byte) (n int, err error) { if s.status == 0 { s.WriteHeader(http.StatusOK) } if s.hijacked { return } return s.rw.Write(data) } func (s *sendFileResponseWriter) WriteHeader(status int) { if s.status != 0 { return } s.status = status if s.status != http.StatusOK { s.rw.WriteHeader(s.status) return } if file := s.Header().Get(sendFileResponseHeader); file != "" { s.Header().Del(sendFileResponseHeader) // Mark this connection as hijacked s.hijacked = true // Serve the file sendFileFromDisk(s.rw, s.req, file) return } if sendData := s.Header().Get(sendDataResponseHeader); strings.HasPrefix(sendData, git.SendBlobPrefix) { s.Header().Del(sendDataResponseHeader) s.hijacked = true git.SendBlob(s.rw, s.req, sendData) return } s.rw.WriteHeader(s.status) return } func sendFileFromDisk(w http.ResponseWriter, r *http.Request, file string) { log.Printf("Send file %q for %s %q", file, r.Method, r.RequestURI) content, fi, err := helper.OpenFile(file) if err != nil { http.NotFound(w, r) return } defer content.Close() http.ServeContent(w, r, "", fi.ModTime(), content) } func (s *sendFileResponseWriter) Flush() { s.WriteHeader(http.StatusOK) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/staticpages/000077500000000000000000000000001265414173000266505ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/staticpages/deploy_page.go000066400000000000000000000010351265414173000314660ustar00rootroot00000000000000package staticpages import ( "../helper" "io/ioutil" "net/http" "path/filepath" ) func (s *Static) DeployPage(handler http.Handler) http.Handler { deployPage := filepath.Join(s.DocumentRoot, "index.html") return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { data, err := 
ioutil.ReadFile(deployPage) if err != nil { handler.ServeHTTP(w, r) return } helper.SetNoCacheHeaders(w.Header()) w.Header().Set("Content-Type", "text/html; charset=utf-8") w.WriteHeader(http.StatusOK) w.Write(data) }) } deploy_page_test.go000066400000000000000000000021701265414173000324470ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/staticpagespackage staticpages import ( "../testhelper" "io/ioutil" "net/http" "net/http/httptest" "os" "path/filepath" "testing" ) func TestIfNoDeployPageExist(t *testing.T) { dir, err := ioutil.TempDir("", "deploy") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) w := httptest.NewRecorder() executed := false st := &Static{dir} st.DeployPage(http.HandlerFunc(func(_ http.ResponseWriter, _ *http.Request) { executed = true })).ServeHTTP(w, nil) if !executed { t.Error("The handler should get executed") } } func TestIfDeployPageExist(t *testing.T) { dir, err := ioutil.TempDir("", "deploy") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) deployPage := "DEPLOY" ioutil.WriteFile(filepath.Join(dir, "index.html"), []byte(deployPage), 0600) w := httptest.NewRecorder() executed := false st := &Static{dir} st.DeployPage(http.HandlerFunc(func(_ http.ResponseWriter, _ *http.Request) { executed = true })).ServeHTTP(w, nil) if executed { t.Error("The handler should not get executed") } w.Flush() testhelper.AssertResponseCode(t, w, 200) testhelper.AssertResponseBody(t, w, deployPage) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/staticpages/error_pages.go000066400000000000000000000030141265414173000315050ustar00rootroot00000000000000package staticpages import ( "../helper" "fmt" "io/ioutil" "log" "net/http" "path/filepath" ) type errorPageResponseWriter struct { rw http.ResponseWriter status int hijacked bool path string } func (s *errorPageResponseWriter) Header() http.Header { return s.rw.Header() } func (s *errorPageResponseWriter) Write(data []byte) (n int, err error) { if s.status == 0 { s.WriteHeader(http.StatusOK) } if s.hijacked { return 0, nil } return s.rw.Write(data) } func (s *errorPageResponseWriter) WriteHeader(status int) { if s.status != 0 { return } s.status = status if 400 <= s.status && s.status <= 599 { errorPageFile := filepath.Join(s.path, fmt.Sprintf("%d.html", s.status)) // check if custom error page exists, serve this page instead if data, err := ioutil.ReadFile(errorPageFile); err == nil { s.hijacked = true log.Printf("ErrorPage: serving predefined error page: %d", s.status) helper.SetNoCacheHeaders(s.rw.Header()) s.rw.Header().Set("Content-Type", "text/html; charset=utf-8") s.rw.WriteHeader(s.status) s.rw.Write(data) return } } s.rw.WriteHeader(status) } func (s *errorPageResponseWriter) Flush() { s.WriteHeader(http.StatusOK) } func (st *Static) ErrorPagesUnless(disabled bool, handler http.Handler) http.Handler { if disabled { return handler } return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { rw := errorPageResponseWriter{ rw: w, path: st.DocumentRoot, } defer rw.Flush() handler.ServeHTTP(&rw, r) }) } error_pages_test.go000066400000000000000000000035231265414173000324720ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/staticpagespackage staticpages import ( "../testhelper" "fmt" "io/ioutil" "net/http" "net/http/httptest" "os" "path/filepath" "testing" ) func TestIfErrorPageIsPresented(t *testing.T) { dir, err := ioutil.TempDir("", "error_page") if err != nil { t.Fatal(err) } 
defer os.RemoveAll(dir) errorPage := "ERROR" ioutil.WriteFile(filepath.Join(dir, "404.html"), []byte(errorPage), 0600) w := httptest.NewRecorder() h := http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(404) fmt.Fprint(w, "Not Found") }) st := &Static{dir} st.ErrorPagesUnless(false, h).ServeHTTP(w, nil) w.Flush() testhelper.AssertResponseCode(t, w, 404) testhelper.AssertResponseBody(t, w, errorPage) } func TestIfErrorPassedIfNoErrorPageIsFound(t *testing.T) { dir, err := ioutil.TempDir("", "error_page") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) w := httptest.NewRecorder() errorResponse := "ERROR" h := http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(404) fmt.Fprint(w, errorResponse) }) st := &Static{dir} st.ErrorPagesUnless(false, h).ServeHTTP(w, nil) w.Flush() testhelper.AssertResponseCode(t, w, 404) testhelper.AssertResponseBody(t, w, errorResponse) } func TestIfErrorPageIsIgnoredInDevelopment(t *testing.T) { dir, err := ioutil.TempDir("", "error_page") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) errorPage := "ERROR" ioutil.WriteFile(filepath.Join(dir, "500.html"), []byte(errorPage), 0600) w := httptest.NewRecorder() serverError := "Interesting Server Error" h := http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(500) fmt.Fprint(w, serverError) }) st := &Static{dir} st.ErrorPagesUnless(true, h).ServeHTTP(w, nil) w.Flush() testhelper.AssertResponseCode(t, w, 500) testhelper.AssertResponseBody(t, w, serverError) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/staticpages/servefile.go000066400000000000000000000035331265414173000311670ustar00rootroot00000000000000package staticpages import ( "../helper" "../urlprefix" "log" "net/http" "os" "path/filepath" "strings" "time" ) type CacheMode int const ( CacheDisabled CacheMode = iota CacheExpireMax ) // BUG/QUIRK: If a client requests 'foo%2Fbar' and 'foo/bar' exists, // handleServeFile will serve foo/bar instead of passing the request // upstream. 
func (s *Static) ServeExisting(prefix urlprefix.Prefix, cache CacheMode, notFoundHandler http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { file := filepath.Join(s.DocumentRoot, prefix.Strip(r.URL.Path)) // The filepath.Join does Clean traversing directories up if !strings.HasPrefix(file, s.DocumentRoot) { helper.Fail500(w, &os.PathError{ Op: "open", Path: file, Err: os.ErrInvalid, }) return } var content *os.File var fi os.FileInfo var err error // Serve pre-gzipped assets if acceptEncoding := r.Header.Get("Accept-Encoding"); strings.Contains(acceptEncoding, "gzip") { content, fi, err = helper.OpenFile(file + ".gz") if err == nil { w.Header().Set("Content-Encoding", "gzip") } } // If not found, open the original file if content == nil || err != nil { content, fi, err = helper.OpenFile(file) } if err != nil { if notFoundHandler != nil { notFoundHandler.ServeHTTP(w, r) } else { http.NotFound(w, r) } return } defer content.Close() switch cache { case CacheExpireMax: // Cache statically served files for 1 year cacheUntil := time.Now().AddDate(1, 0, 0).Format(http.TimeFormat) w.Header().Set("Cache-Control", "public") w.Header().Set("Expires", cacheUntil) } log.Printf("Send static file %q (%q) for %s %q", file, w.Header().Get("Content-Encoding"), r.Method, r.RequestURI) http.ServeContent(w, r, filepath.Base(file), fi.ModTime(), content) }) } servefile_test.go000066400000000000000000000067501265414173000321530ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/staticpagespackage staticpages import ( "../testhelper" "bytes" "compress/gzip" "io/ioutil" "net/http" "net/http/httptest" "os" "path/filepath" "testing" ) func TestServingNonExistingFile(t *testing.T) { dir := "/path/to/non/existing/directory" httpRequest, _ := http.NewRequest("GET", "/file", nil) w := httptest.NewRecorder() st := &Static{dir} st.ServeExisting("/", CacheDisabled, nil).ServeHTTP(w, httpRequest) testhelper.AssertResponseCode(t, w, 404) } func TestServingDirectory(t *testing.T) { dir, err := ioutil.TempDir("", "deploy") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) httpRequest, _ := http.NewRequest("GET", "/file", nil) w := httptest.NewRecorder() st := &Static{dir} st.ServeExisting("/", CacheDisabled, nil).ServeHTTP(w, httpRequest) testhelper.AssertResponseCode(t, w, 404) } func TestServingMalformedUri(t *testing.T) { dir := "/path/to/non/existing/directory" httpRequest, _ := http.NewRequest("GET", "/../../../static/file", nil) w := httptest.NewRecorder() st := &Static{dir} st.ServeExisting("/", CacheDisabled, nil).ServeHTTP(w, httpRequest) testhelper.AssertResponseCode(t, w, 404) } func TestExecutingHandlerWhenNoFileFound(t *testing.T) { dir := "/path/to/non/existing/directory" httpRequest, _ := http.NewRequest("GET", "/file", nil) executed := false st := &Static{dir} st.ServeExisting("/", CacheDisabled, http.HandlerFunc(func(_ http.ResponseWriter, r *http.Request) { executed = (r == httpRequest) })).ServeHTTP(nil, httpRequest) if !executed { t.Error("The handler should get executed") } } func TestServingTheActualFile(t *testing.T) { dir, err := ioutil.TempDir("", "deploy") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) httpRequest, _ := http.NewRequest("GET", "/file", nil) fileContent := "STATIC" ioutil.WriteFile(filepath.Join(dir, "file"), []byte(fileContent), 0600) w := httptest.NewRecorder() st := &Static{dir} st.ServeExisting("/", CacheDisabled, nil).ServeHTTP(w, httpRequest) 
testhelper.AssertResponseCode(t, w, 200) if w.Body.String() != fileContent { t.Error("We should serve the file: ", w.Body.String()) } } func testServingThePregzippedFile(t *testing.T, enableGzip bool) { dir, err := ioutil.TempDir("", "deploy") if err != nil { t.Fatal(err) } defer os.RemoveAll(dir) httpRequest, _ := http.NewRequest("GET", "/file", nil) if enableGzip { httpRequest.Header.Set("Accept-Encoding", "gzip, deflate") } fileContent := "STATIC" var fileGzipContent bytes.Buffer fileGzip := gzip.NewWriter(&fileGzipContent) fileGzip.Write([]byte(fileContent)) fileGzip.Close() ioutil.WriteFile(filepath.Join(dir, "file.gz"), fileGzipContent.Bytes(), 0600) ioutil.WriteFile(filepath.Join(dir, "file"), []byte(fileContent), 0600) w := httptest.NewRecorder() st := &Static{dir} st.ServeExisting("/", CacheDisabled, nil).ServeHTTP(w, httpRequest) testhelper.AssertResponseCode(t, w, 200) if enableGzip { testhelper.AssertResponseHeader(t, w, "Content-Encoding", "gzip") if bytes.Compare(w.Body.Bytes(), fileGzipContent.Bytes()) != 0 { t.Error("We should serve the pregzipped file") } } else { testhelper.AssertResponseCode(t, w, 200) testhelper.AssertResponseHeader(t, w, "Content-Encoding", "") if w.Body.String() != fileContent { t.Error("We should serve the file: ", w.Body.String()) } } } func TestServingThePregzippedFile(t *testing.T) { testServingThePregzippedFile(t, true) } func TestServingThePregzippedFileWithoutEncoding(t *testing.T) { testServingThePregzippedFile(t, false) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/staticpages/static.go000066400000000000000000000001011265414173000304560ustar00rootroot00000000000000package staticpages type Static struct { DocumentRoot string } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/testhelper/000077500000000000000000000000001265414173000265205ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/testhelper/testhelper.go000066400000000000000000000025701265414173000312320ustar00rootroot00000000000000package testhelper import ( "log" "net/http" "net/http/httptest" "regexp" "testing" ) func AssertResponseCode(t *testing.T, response *httptest.ResponseRecorder, expectedCode int) { if response.Code != expectedCode { t.Fatalf("for HTTP request expected to get %d, got %d instead", expectedCode, response.Code) } } func AssertResponseBody(t *testing.T, response *httptest.ResponseRecorder, expectedBody string) { if response.Body.String() != expectedBody { t.Fatalf("for HTTP request expected to receive %q, got %q instead as body", expectedBody, response.Body.String()) } } func AssertResponseHeader(t *testing.T, response *httptest.ResponseRecorder, header string, expectedValue string) { if response.Header().Get(header) != expectedValue { t.Fatalf("for HTTP request expected to receive the header %q with %q, got %q", header, expectedValue, response.Header().Get(header)) } } func TestServerWithHandler(url *regexp.Regexp, handler http.HandlerFunc) *httptest.Server { return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if url != nil && !url.MatchString(r.URL.Path) { log.Println("UPSTREAM", r.Method, r.URL, "DENY") w.WriteHeader(404) return } if version := r.Header.Get("Gitlab-Workhorse"); version == "" { log.Println("UPSTREAM", r.Method, r.URL, "DENY") w.WriteHeader(403) return } handler(w, r) })) } 
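// usageSketch is a minimal, illustrative sketch (not called anywhere in the
// suite) of how the helpers above are meant to be combined: stub the Rails
// backend with TestServerWithHandler, point the handler under test at ts.URL,
// record its response, and check it with the Assert* helpers. The route
// regexp, status code, body and header name are arbitrary example values;
// handlerUnderTest stands in for whichever workhorse handler a test exercises.
func usageSketch(t *testing.T, handlerUnderTest http.Handler) {
	ts := TestServerWithHandler(regexp.MustCompile(`^/api/`), func(w http.ResponseWriter, r *http.Request) {
		// Requests that miss the regexp or lack the Gitlab-Workhorse header
		// never reach this point; TestServerWithHandler already answered them
		// with 404 or 403.
		w.WriteHeader(200)
		w.Write([]byte("OK"))
	})
	defer ts.Close()

	req, err := http.NewRequest("GET", ts.URL+"/api/v3/projects/123", nil)
	if err != nil {
		t.Fatal(err)
	}

	response := httptest.NewRecorder()
	handlerUnderTest.ServeHTTP(response, req)

	// Assuming handlerUnderTest proxies the request through to the stub backend:
	AssertResponseCode(t, response, 200)
	AssertResponseBody(t, response, "OK")
	AssertResponseHeader(t, response, "Content-Encoding", "")
}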
gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/upload/000077500000000000000000000000001265414173000256255ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/upload/uploads.go000066400000000000000000000063321265414173000276270ustar00rootroot00000000000000package upload import ( "../helper" "bytes" "fmt" "io" "io/ioutil" "mime/multipart" "net/http" "os" ) type MultipartFormProcessor interface { ProcessFile(formName, fileName string, writer *multipart.Writer) error ProcessField(formName string, writer *multipart.Writer) error } func rewriteFormFilesFromMultipart(r *http.Request, writer *multipart.Writer, tempPath string, filter MultipartFormProcessor) (cleanup func(), err error) { // Create multipart reader reader, err := r.MultipartReader() if err != nil { if err == http.ErrNotMultipart { // We want to be able to recognize http.ErrNotMultipart elsewhere so no fmt.Errorf return nil, http.ErrNotMultipart } return nil, fmt.Errorf("get multipart reader: %v", err) } var files []string cleanup = func() { for _, file := range files { os.Remove(file) } } // Execute cleanup in case of failure defer func() { if err != nil { cleanup() } }() for { p, err := reader.NextPart() if err == io.EOF { break } name := p.FormName() if name == "" { continue } // Copy form field if filename := p.FileName(); filename != "" { // Create temporary directory where the uploaded file will be stored if err := os.MkdirAll(tempPath, 0700); err != nil { return cleanup, fmt.Errorf("mkdir for tempfile: %v", err) } // Create temporary file in path returned by Authorization filter file, err := ioutil.TempFile(tempPath, "upload_") if err != nil { return cleanup, fmt.Errorf("create tempfile: %v", err) } defer file.Close() // Add file entry writer.WriteField(name+".path", file.Name()) writer.WriteField(name+".name", filename) files = append(files, file.Name()) _, err = io.Copy(file, p) if err != nil { return cleanup, fmt.Errorf("copy from multipart to tempfile: %v", err) } file.Close() if err := filter.ProcessFile(name, file.Name(), writer); err != nil { return cleanup, err } } else { np, err := writer.CreatePart(p.Header) if err != nil { return cleanup, fmt.Errorf("create multipart field: %v", err) } _, err = io.Copy(np, p) if err != nil { return cleanup, fmt.Errorf("duplicate multipart field: %v", err) } if err := filter.ProcessField(name, writer); err != nil { return cleanup, fmt.Errorf("process multipart field: %v", err) } } } return cleanup, nil } func HandleFileUploads(w http.ResponseWriter, r *http.Request, h http.Handler, tempPath string, filter MultipartFormProcessor) { if tempPath == "" { helper.Fail500(w, fmt.Errorf("handleFileUploads: tempPath empty")) return } var body bytes.Buffer writer := multipart.NewWriter(&body) defer writer.Close() // Rewrite multipart form data cleanup, err := rewriteFormFilesFromMultipart(r, writer, tempPath, filter) if err != nil { if err == http.ErrNotMultipart { h.ServeHTTP(w, r) } else { helper.Fail500(w, fmt.Errorf("handleFileUploads: extract files from multipart: %v", err)) } return } if cleanup != nil { defer cleanup() } // Close writer writer.Close() // Hijack the request r.Body = ioutil.NopCloser(&body) r.ContentLength = int64(body.Len()) r.Header.Set("Content-Type", writer.FormDataContentType()) // Proxy the request h.ServeHTTP(w, r) } 
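// nopFormProcessor is a minimal, illustrative MultipartFormProcessor sketch;
// it is not used by workhorse itself. It accepts every rewritten file and
// field unchanged, which is the least a filter can do; a real filter, such as
// the CI artifacts uploader wired up in routes.go, can inspect the temp file
// or append extra fields to the multipart writer in these hooks. A caller
// could wire it up as, hypothetically:
//
//	HandleFileUploads(w, r, proxyHandler, tempPath, nopFormProcessor{})
type nopFormProcessor struct{}

func (nopFormProcessor) ProcessFile(formName, fileName string, writer *multipart.Writer) error {
	// fileName is the path of the temp file created by
	// rewriteFormFilesFromMultipart; by the time this hook runs the upload is
	// already on disk and only the formName+".path" / formName+".name" fields
	// are forwarded to the upstream handler.
	return nil
}

func (nopFormProcessor) ProcessField(formName string, writer *multipart.Writer) error {
	return nil
}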
gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/upload/uploads_test.go000066400000000000000000000120531265414173000306630ustar00rootroot00000000000000package upload import ( "../helper" "../proxy" "../testhelper" "bytes" "errors" "fmt" "io" "io/ioutil" "mime/multipart" "net/http" "net/http/httptest" "os" "regexp" "strings" "testing" ) var nilHandler = http.HandlerFunc(func(http.ResponseWriter, *http.Request) {}) type testFormProcessor struct { } func (a *testFormProcessor) ProcessFile(formName, fileName string, writer *multipart.Writer) error { if formName != "file" && fileName != "my.file" { return errors.New("illegal file") } return nil } func (a *testFormProcessor) ProcessField(formName string, writer *multipart.Writer) error { if formName != "token" { return errors.New("illegal field") } return nil } func TestUploadTempPathRequirement(t *testing.T) { response := httptest.NewRecorder() request := &http.Request{} HandleFileUploads(response, request, nilHandler, "", nil) testhelper.AssertResponseCode(t, response, 500) } func TestUploadHandlerForwardingRawData(t *testing.T) { ts := testhelper.TestServerWithHandler(regexp.MustCompile(`/url/path\z`), func(w http.ResponseWriter, r *http.Request) { if r.Method != "PATCH" { t.Fatal("Expected PATCH request") } var body bytes.Buffer io.Copy(&body, r.Body) if body.String() != "REQUEST" { t.Fatal("Expected REQUEST in request body") } w.WriteHeader(202) fmt.Fprint(w, "RESPONSE") }) defer ts.Close() httpRequest, err := http.NewRequest("PATCH", ts.URL+"/url/path", bytes.NewBufferString("REQUEST")) if err != nil { t.Fatal(err) } tempPath, err := ioutil.TempDir("", "uploads") if err != nil { t.Fatal(err) } defer os.RemoveAll(tempPath) response := httptest.NewRecorder() handler := proxy.NewProxy(helper.URLMustParse(ts.URL), "123", nil) HandleFileUploads(response, httpRequest, handler, tempPath, nil) testhelper.AssertResponseCode(t, response, 202) if response.Body.String() != "RESPONSE" { t.Fatal("Expected RESPONSE in response body") } } func TestUploadHandlerRewritingMultiPartData(t *testing.T) { var filePath string tempPath, err := ioutil.TempDir("", "uploads") if err != nil { t.Fatal(err) } defer os.RemoveAll(tempPath) ts := testhelper.TestServerWithHandler(regexp.MustCompile(`/url/path\z`), func(w http.ResponseWriter, r *http.Request) { if r.Method != "PUT" { t.Fatal("Expected PUT request") } err := r.ParseMultipartForm(100000) if err != nil { t.Fatal(err) } if len(r.MultipartForm.Value) != 3 { t.Fatal("Expected to receive exactly 3 values") } if len(r.MultipartForm.File) != 0 { t.Fatal("Expected to not receive any files") } if r.FormValue("token") != "test" { t.Fatal("Expected to receive token") } if r.FormValue("file.name") != "my.file" { t.Fatal("Expected to receive a filename") } filePath = r.FormValue("file.path") if !strings.HasPrefix(r.FormValue("file.path"), tempPath) { t.Fatal("Expected to the file to be in tempPath") } w.WriteHeader(202) fmt.Fprint(w, "RESPONSE") }) var buffer bytes.Buffer writer := multipart.NewWriter(&buffer) writer.WriteField("token", "test") file, err := writer.CreateFormFile("file", "my.file") if err != nil { t.Fatal(err) } fmt.Fprint(file, "test") writer.Close() httpRequest, err := http.NewRequest("PUT", ts.URL+"/url/path", nil) if err != nil { t.Fatal(err) } httpRequest.Body = ioutil.NopCloser(&buffer) httpRequest.ContentLength = int64(buffer.Len()) httpRequest.Header.Set("Content-Type", writer.FormDataContentType()) response := httptest.NewRecorder() handler := 
proxy.NewProxy(helper.URLMustParse(ts.URL), "123", nil) HandleFileUploads(response, httpRequest, handler, tempPath, &testFormProcessor{}) testhelper.AssertResponseCode(t, response, 202) if _, err := os.Stat(filePath); !os.IsNotExist(err) { t.Fatal("expected the file to be deleted") } } func TestUploadProcessingField(t *testing.T) { tempPath, err := ioutil.TempDir("", "uploads") if err != nil { t.Fatal(err) } defer os.RemoveAll(tempPath) var buffer bytes.Buffer writer := multipart.NewWriter(&buffer) writer.WriteField("token2", "test") writer.Close() httpRequest, err := http.NewRequest("PUT", "/url/path", &buffer) if err != nil { t.Fatal(err) } httpRequest.Header.Set("Content-Type", writer.FormDataContentType()) response := httptest.NewRecorder() HandleFileUploads(response, httpRequest, nilHandler, tempPath, &testFormProcessor{}) testhelper.AssertResponseCode(t, response, 500) } func TestUploadProcessingFile(t *testing.T) { tempPath, err := ioutil.TempDir("", "uploads") if err != nil { t.Fatal(err) } defer os.RemoveAll(tempPath) var buffer bytes.Buffer writer := multipart.NewWriter(&buffer) file, err := writer.CreateFormFile("file2", "my.file") if err != nil { t.Fatal(err) } fmt.Fprint(file, "test") writer.Close() httpRequest, err := http.NewRequest("PUT", "/url/path", &buffer) if err != nil { t.Fatal(err) } httpRequest.Header.Set("Content-Type", writer.FormDataContentType()) response := httptest.NewRecorder() HandleFileUploads(response, httpRequest, nilHandler, tempPath, &testFormProcessor{}) testhelper.AssertResponseCode(t, response, 500) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/upstream/000077500000000000000000000000001265414173000262015ustar00rootroot00000000000000development_test.go000066400000000000000000000015731265414173000320400ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/upstreampackage upstream import ( "../testhelper" "net/http" "net/http/httptest" "testing" ) func TestDevelopmentModeEnabled(t *testing.T) { developmentMode := true r, _ := http.NewRequest("GET", "/something", nil) w := httptest.NewRecorder() executed := false NotFoundUnless(developmentMode, http.HandlerFunc(func(_ http.ResponseWriter, _ *http.Request) { executed = true })).ServeHTTP(w, r) if !executed { t.Error("The handler should get executed") } } func TestDevelopmentModeDisabled(t *testing.T) { developmentMode := false r, _ := http.NewRequest("GET", "/something", nil) w := httptest.NewRecorder() executed := false NotFoundUnless(developmentMode, http.HandlerFunc(func(_ http.ResponseWriter, _ *http.Request) { executed = true })).ServeHTTP(w, r) if executed { t.Error("The handler should not get executed") } testhelper.AssertResponseCode(t, w, 404) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/upstream/handlers.go000066400000000000000000000013711265414173000303320ustar00rootroot00000000000000package upstream import ( "../helper" "compress/gzip" "fmt" "io" "net/http" ) func contentEncodingHandler(h http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { var body io.ReadCloser var err error // The client request body may have been gzipped. 
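// (Git's smart-HTTP clients, for instance, may gzip the bodies they POST to
// git-upload-pack and git-receive-pack; routes.go wraps those RPC endpoints
// in this handler for exactly that case.)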
contentEncoding := r.Header.Get("Content-Encoding") switch contentEncoding { case "": body = r.Body case "gzip": body, err = gzip.NewReader(r.Body) default: err = fmt.Errorf("unsupported content encoding: %s", contentEncoding) } if err != nil { helper.Fail500(w, fmt.Errorf("contentEncodingHandler: %v", err)) return } defer body.Close() r.Body = body r.Header.Del("Content-Encoding") h.ServeHTTP(w, r) }) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/upstream/handlers_test.go000066400000000000000000000035351265414173000313750ustar00rootroot00000000000000package upstream import ( "../testhelper" "bytes" "compress/gzip" "fmt" "io/ioutil" "net/http" "net/http/httptest" "reflect" "testing" ) func TestGzipEncoding(t *testing.T) { resp := httptest.NewRecorder() var b bytes.Buffer w := gzip.NewWriter(&b) fmt.Fprint(w, "test") w.Close() body := ioutil.NopCloser(&b) req, err := http.NewRequest("POST", "http://address/test", body) if err != nil { t.Fatal(err) } req.Header.Set("Content-Encoding", "gzip") contentEncodingHandler(http.HandlerFunc(func(_ http.ResponseWriter, r *http.Request) { if _, ok := r.Body.(*gzip.Reader); !ok { t.Fatal("Expected gzip reader for body, but it's:", reflect.TypeOf(r.Body)) } if r.Header.Get("Content-Encoding") != "" { t.Fatal("Content-Encoding should be deleted") } })).ServeHTTP(resp, req) testhelper.AssertResponseCode(t, resp, 200) } func TestNoEncoding(t *testing.T) { resp := httptest.NewRecorder() var b bytes.Buffer body := ioutil.NopCloser(&b) req, err := http.NewRequest("POST", "http://address/test", body) if err != nil { t.Fatal(err) } req.Header.Set("Content-Encoding", "") contentEncodingHandler(http.HandlerFunc(func(_ http.ResponseWriter, r *http.Request) { if r.Body != body { t.Fatal("Expected the same body") } if r.Header.Get("Content-Encoding") != "" { t.Fatal("Content-Encoding should be deleted") } })).ServeHTTP(resp, req) testhelper.AssertResponseCode(t, resp, 200) } func TestInvalidEncoding(t *testing.T) { resp := httptest.NewRecorder() req, err := http.NewRequest("POST", "http://address/test", nil) if err != nil { t.Fatal(err) } req.Header.Set("Content-Encoding", "application/unknown") contentEncodingHandler(http.HandlerFunc(func(_ http.ResponseWriter, _ *http.Request) { t.Fatal("it shouldn't be executed") })).ServeHTTP(resp, req) testhelper.AssertResponseCode(t, resp, 500) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/upstream/notfoundunless.go000066400000000000000000000002751265414173000316220ustar00rootroot00000000000000package upstream import "net/http" func NotFoundUnless(pass bool, handler http.Handler) http.Handler { if pass { return handler } else { return http.HandlerFunc(http.NotFound) } } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/upstream/routes.go000066400000000000000000000072151265414173000300560ustar00rootroot00000000000000package upstream import ( apipkg "../api" "../artifacts" "../git" "../lfs" proxypkg "../proxy" "../staticpages" "net/http" "regexp" ) type route struct { method string regex *regexp.Regexp handler http.Handler } const projectPattern = `^/[^/]+/[^/]+/` const gitProjectPattern = `^/[^/]+/[^/]+\.git/` const apiPattern = `^/api/` // A project ID in an API request is either a number or two strings 'namespace/project' const projectsAPIPattern = `^/api/v3/projects/((\d+)|([^/]+/[^/]+))/` const ciAPIPattern = `^/ci/api/` // Routing table // We match against URI not containing the relativeUrlRoot: // see upstream.ServeHTTP func (u *Upstream) 
configureRoutes() { api := apipkg.NewAPI( u.Backend, u.Version, u.RoundTripper, ) static := &staticpages.Static{u.DocumentRoot} proxy := proxypkg.NewProxy( u.Backend, u.Version, u.RoundTripper, ) u.Routes = []route{ // Git Clone route{"GET", regexp.MustCompile(gitProjectPattern + `info/refs\z`), git.GetInfoRefs(api)}, route{"POST", regexp.MustCompile(gitProjectPattern + `git-upload-pack\z`), contentEncodingHandler(git.PostRPC(api))}, route{"POST", regexp.MustCompile(gitProjectPattern + `git-receive-pack\z`), contentEncodingHandler(git.PostRPC(api))}, route{"PUT", regexp.MustCompile(gitProjectPattern + `gitlab-lfs/objects/([0-9a-f]{64})/([0-9]+)\z`), lfs.PutStore(api, proxy)}, // Repository Archive route{"GET", regexp.MustCompile(projectPattern + `repository/archive\z`), git.GetArchive(api)}, route{"GET", regexp.MustCompile(projectPattern + `repository/archive.zip\z`), git.GetArchive(api)}, route{"GET", regexp.MustCompile(projectPattern + `repository/archive.tar\z`), git.GetArchive(api)}, route{"GET", regexp.MustCompile(projectPattern + `repository/archive.tar.gz\z`), git.GetArchive(api)}, route{"GET", regexp.MustCompile(projectPattern + `repository/archive.tar.bz2\z`), git.GetArchive(api)}, // Repository Archive API route{"GET", regexp.MustCompile(projectsAPIPattern + `repository/archive\z`), git.GetArchive(api)}, route{"GET", regexp.MustCompile(projectsAPIPattern + `repository/archive.zip\z`), git.GetArchive(api)}, route{"GET", regexp.MustCompile(projectsAPIPattern + `repository/archive.tar\z`), git.GetArchive(api)}, route{"GET", regexp.MustCompile(projectsAPIPattern + `repository/archive.tar.gz\z`), git.GetArchive(api)}, route{"GET", regexp.MustCompile(projectsAPIPattern + `repository/archive.tar.bz2\z`), git.GetArchive(api)}, // CI Artifacts route{"GET", regexp.MustCompile(projectPattern + `builds/[0-9]+/artifacts/file/`), contentEncodingHandler(artifacts.DownloadArtifact(api))}, route{"POST", regexp.MustCompile(ciAPIPattern + `v1/builds/[0-9]+/artifacts\z`), contentEncodingHandler(artifacts.UploadArtifacts(api, proxy))}, // Explicitly proxy API requests route{"", regexp.MustCompile(apiPattern), proxy}, route{"", regexp.MustCompile(ciAPIPattern), proxy}, // Serve assets route{"", regexp.MustCompile(`^/assets/`), static.ServeExisting(u.URLPrefix, staticpages.CacheExpireMax, NotFoundUnless(u.DevelopmentMode, proxy, ), ), }, // For legacy reasons, user uploads are stored under the document root. // To prevent anybody who knows/guesses the URL of a user-uploaded file // from downloading it we make sure requests to /uploads/ do _not_ pass // through static.ServeExisting. route{"", regexp.MustCompile(`^/uploads/`), static.ErrorPagesUnless(u.DevelopmentMode, proxy)}, // Serve static files or forward the requests route{"", nil, static.ServeExisting(u.URLPrefix, staticpages.CacheDisabled, static.DeployPage( static.ErrorPagesUnless(u.DevelopmentMode, proxy, ), ), ), }, } } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/upstream/upstream.go000066400000000000000000000045641265414173000304010ustar00rootroot00000000000000/* The upstream type implements http.Handler. In this file we handle request routing and interaction with the authBackend. 
*/ package upstream import ( "../badgateway" "../helper" "../urlprefix" "fmt" "net/http" "net/url" "strings" "time" ) var DefaultBackend = helper.URLMustParse("http://localhost:8080") type Upstream struct { Backend *url.URL Version string DocumentRoot string DevelopmentMode bool URLPrefix urlprefix.Prefix Routes []route RoundTripper *badgateway.RoundTripper } func NewUpstream(backend *url.URL, socket string, version string, documentRoot string, developmentMode bool, proxyHeadersTimeout time.Duration) *Upstream { up := Upstream{ Backend: backend, Version: version, DocumentRoot: documentRoot, DevelopmentMode: developmentMode, RoundTripper: badgateway.NewRoundTripper(socket, proxyHeadersTimeout), } if backend == nil { up.Backend = DefaultBackend } up.configureURLPrefix() up.configureRoutes() return &up } func (u *Upstream) configureURLPrefix() { relativeURLRoot := u.Backend.Path if !strings.HasSuffix(relativeURLRoot, "/") { relativeURLRoot += "/" } u.URLPrefix = urlprefix.Prefix(relativeURLRoot) } func (u *Upstream) ServeHTTP(ow http.ResponseWriter, r *http.Request) { w := helper.NewLoggingResponseWriter(ow) defer w.Log(r) // Drop WebSocket connection and CONNECT method if r.RequestURI == "*" { helper.HTTPError(&w, r, "Connection upgrade not allowed", http.StatusBadRequest) return } // Disallow connect if r.Method == "CONNECT" { helper.HTTPError(&w, r, "CONNECT not allowed", http.StatusBadRequest) return } // Check URL Root URIPath := urlprefix.CleanURIPath(r.URL.Path) prefix := u.URLPrefix if !prefix.Match(URIPath) { helper.HTTPError(&w, r, fmt.Sprintf("Not found %q", URIPath), http.StatusNotFound) return } // Look for a matching Git service var ro route foundService := false for _, ro = range u.Routes { if ro.method != "" && r.Method != ro.method { continue } if ro.regex == nil || ro.regex.MatchString(prefix.Strip(URIPath)) { foundService = true break } } if !foundService { // The protocol spec in git/Documentation/technical/http-protocol.txt // says we must return 403 if no matching service is found. helper.HTTPError(&w, r, "Forbidden", http.StatusForbidden) return } ro.handler.ServeHTTP(&w, r) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/urlprefix/000077500000000000000000000000001265414173000263615ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/urlprefix/urlprefix.go000066400000000000000000000012601265414173000307270ustar00rootroot00000000000000package urlprefix import ( "path" "strings" ) type Prefix string func (p Prefix) Strip(path string) string { return CleanURIPath(strings.TrimPrefix(path, string(p))) } func (p Prefix) Match(path string) bool { pre := string(p) return strings.HasPrefix(path, pre) || path+"/" == pre } // Borrowed from: net/http/server.go // Return the canonical path for p, eliminating . and .. elements. func CleanURIPath(p string) string { if p == "" { return "/" } if p[0] != '/' { p = "/" + p } np := path.Clean(p) // path.Clean removes trailing slash except for root; // put the trailing slash back if necessary. 
if p[len(p)-1] == '/' && np != "/" { np += "/" } return np } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/zipartifacts/000077500000000000000000000000001265414173000270445ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/zipartifacts/codes.go000066400000000000000000000002221265414173000304640ustar00rootroot00000000000000package zipartifacts // These are exit codes used by subprocesses in cmd/gitlab-zip-xxx const ( StatusNotZip = 10 + iota StatusEntryNotFound ) gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/zipartifacts/entry.go000066400000000000000000000003461265414173000305370ustar00rootroot00000000000000package zipartifacts import ( "encoding/base64" ) func DecodeFileEntry(entry string) (string, error) { decoded, err := base64.StdEncoding.DecodeString(entry) if err != nil { return "", err } return string(decoded), nil } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/zipartifacts/metadata.go000066400000000000000000000055171265414173000311630ustar00rootroot00000000000000package zipartifacts import ( "archive/zip" "compress/gzip" "encoding/binary" "encoding/json" "io" "os" "path" "sort" "strconv" ) type metadata struct { Modified int64 `json:"modified,omitempty"` Mode string `json:"mode,omitempty"` CRC uint32 `json:"crc,omitempty"` Size uint64 `json:"size,omitempty"` Zipped uint64 `json:"zipped,omitempty"` Comment string `json:"comment,omitempty"` } const MetadataHeaderPrefix = "\x00\x00\x00&" // length of string below, encoded properly const MetadataHeader = "GitLab Build Artifacts Metadata 0.0.2\n" func newMetadata(file *zip.File) metadata { if file == nil { return metadata{} } return metadata{ Modified: file.ModTime().Unix(), Mode: strconv.FormatUint(uint64(file.Mode().Perm()), 8), CRC: file.CRC32, Size: file.UncompressedSize64, Zipped: file.CompressedSize64, Comment: file.Comment, } } func (m metadata) writeEncoded(output io.Writer) error { j, err := json.Marshal(m) if err != nil { return err } j = append(j, byte('\n')) return writeBytes(output, j) } func writeZipEntryMetadata(output io.Writer, path string, entry *zip.File) error { if err := writeString(output, path); err != nil { return err } if err := newMetadata(entry).writeEncoded(output); err != nil { return err } return nil } func generateZipMetadata(output io.Writer, archive *zip.Reader) error { if err := writeString(output, MetadataHeader); err != nil { return err } // Write empty error header that we may need in the future if err := writeString(output, "{}"); err != nil { return err } // Create map of files in zip archive zipMap := make(map[string]*zip.File, len(archive.File)) // Add missing entries for _, entry := range archive.File { zipMap[entry.Name] = entry for d := path.Dir(entry.Name); d != "." 
&& d != "/"; d = path.Dir(d) { entryDir := d + "/" if _, ok := zipMap[entryDir]; !ok { zipMap[entryDir] = nil } } } // Sort paths sortedPaths := make([]string, 0, len(zipMap)) for path, _ := range zipMap { sortedPaths = append(sortedPaths, path) } sort.Strings(sortedPaths) // Write all files for _, path := range sortedPaths { if err := writeZipEntryMetadata(output, path, zipMap[path]); err != nil { return err } } return nil } func GenerateZipMetadataFromFile(fileName string, w io.Writer) error { archive, err := zip.OpenReader(fileName) if err != nil { // Ignore non-zip archives return os.ErrInvalid } defer archive.Close() gz := gzip.NewWriter(w) defer gz.Close() return generateZipMetadata(gz, &archive.Reader) } func writeBytes(output io.Writer, data []byte) error { err := binary.Write(output, binary.BigEndian, uint32(len(data))) if err == nil { _, err = output.Write(data) } return err } func writeString(output io.Writer, str string) error { return writeBytes(output, []byte(str)) } metadata_test.go000066400000000000000000000022221265414173000321310ustar00rootroot00000000000000gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/internal/zipartifactspackage zipartifacts import ( "archive/zip" "bytes" "encoding/binary" "fmt" "testing" ) func TestMissingMetadataEntries(t *testing.T) { var zipBuffer, metaBuffer bytes.Buffer archive := zip.NewWriter(&zipBuffer) // non-POSIX paths are here just to test if we never enter infinite loop files := []string{"file1", "some/file/dir/", "some/file/dir/file2", "../../test12/test", "/usr/bin/test", `c:\windows\win32.exe`, `c:/windows/win.dll`, "./f/asd", "/"} for _, file := range files { archiveFile, err := archive.Create(file) if err != nil { t.Fatal(err) } fmt.Fprint(archiveFile, file) } archive.Close() zipReader := bytes.NewReader(zipBuffer.Bytes()) zipArchiveReader, _ := zip.NewReader(zipReader, int64(binary.Size(zipBuffer.Bytes()))) if err := generateZipMetadata(&metaBuffer, zipArchiveReader); err != nil { t.Fatal("zipartifacts: generateZipMetadata failed", err) } paths := []string{"file1", "some/", "some/file/", "some/file/dir/", "some/file/dir/file2"} for _, path := range paths { if !bytes.Contains(metaBuffer.Bytes(), []byte(path+"\x00")) { t.Fatal("zipartifacts: metadata for path", path, "not found") } } } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/main.go000066400000000000000000000060301265414173000237770ustar00rootroot00000000000000/* gitlab-workhorse handles slow requests for GitLab This HTTP server can service 'git clone', 'git push' etc. commands from Git clients that use the 'smart' Git HTTP protocol (git-upload-pack and git-receive-pack). It is intended to be deployed behind NGINX (for request routing and SSL termination) with access to a GitLab backend (for authentication and authorization) and local disk access to Git repositories managed by GitLab. In GitLab, this role was previously performed by gitlab-grack. In this file we start the web server and hand off to the upstream type. 
*/ package main import ( "./internal/upstream" "flag" "fmt" "log" "net" "net/http" _ "net/http/pprof" "os" "syscall" "time" ) // Current version of GitLab Workhorse var Version = "(unknown version)" // Set at build time in the Makefile var printVersion = flag.Bool("version", false, "Print version and exit") var listenAddr = flag.String("listenAddr", "localhost:8181", "Listen address for HTTP server") var listenNetwork = flag.String("listenNetwork", "tcp", "Listen 'network' (tcp, tcp4, tcp6, unix)") var listenUmask = flag.Int("listenUmask", 022, "Umask for Unix socket, default: 022") var authBackend = URLFlag("authBackend", upstream.DefaultBackend, "Authentication/authorization backend") var authSocket = flag.String("authSocket", "", "Optional: Unix domain socket to dial authBackend at") var pprofListenAddr = flag.String("pprofListenAddr", "", "pprof listening address, e.g. 'localhost:6060'") var documentRoot = flag.String("documentRoot", "public", "Path to static files content") var proxyHeadersTimeout = flag.Duration("proxyHeadersTimeout", time.Minute, "How long to wait for response headers when proxying the request") var developmentMode = flag.Bool("developmentMode", false, "Allow to serve assets from Rails app") func main() { flag.Usage = func() { fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0]) fmt.Fprintf(os.Stderr, "\n %s [OPTIONS]\n\nOptions:\n", os.Args[0]) flag.PrintDefaults() } flag.Parse() version := fmt.Sprintf("gitlab-workhorse %s", Version) if *printVersion { fmt.Println(version) os.Exit(0) } log.Printf("Starting %s", version) // Good housekeeping for Unix sockets: unlink before binding if *listenNetwork == "unix" { if err := os.Remove(*listenAddr); err != nil && !os.IsNotExist(err) { log.Fatal(err) } } // Change the umask only around net.Listen() oldUmask := syscall.Umask(*listenUmask) listener, err := net.Listen(*listenNetwork, *listenAddr) syscall.Umask(oldUmask) if err != nil { log.Fatal(err) } // The profiler will only be activated by HTTP requests. HTTP // requests can only reach the profiler if we start a listener. So by // having no profiler HTTP listener by default, the profiler is // effectively disabled by default. 
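// For example (hypothetical invocation; any otherwise unused local address
// works):
//
//	gitlab-workhorse -pprofListenAddr localhost:6060
//
// exposes the standard net/http/pprof endpoints under /debug/pprof/ on that
// address, while the main listener is unaffected.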
if *pprofListenAddr != "" { go func() { log.Print(http.ListenAndServe(*pprofListenAddr, nil)) }() } up := upstream.NewUpstream( *authBackend, *authSocket, Version, *documentRoot, *developmentMode, *proxyHeadersTimeout, ) log.Fatal(http.Serve(listener, up)) } gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/main_test.go000066400000000000000000000462231265414173000250460ustar00rootroot00000000000000package main import ( "./internal/api" "./internal/helper" "./internal/testhelper" "./internal/upstream" "bytes" "encoding/base64" "encoding/json" "fmt" "io" "io/ioutil" "log" "mime/multipart" "net/http" "net/http/httptest" "os" "os/exec" "path" "regexp" "strings" "testing" "time" ) const scratchDir = "testdata/scratch" const testRepoRoot = "testdata/data" const testDocumentRoot = "testdata/public" const testRepo = "group/test.git" const testProject = "group/test" var checkoutDir = path.Join(scratchDir, "test") var cacheDir = path.Join(scratchDir, "cache") func TestAllowedClone(t *testing.T) { // Prepare clone directory if err := os.RemoveAll(scratchDir); err != nil { t.Fatal(err) } // Prepare test server and backend ts := testAuthServer(nil, 200, gitOkBody(t)) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() // Do the git clone cloneCmd := exec.Command("git", "clone", fmt.Sprintf("%s/%s", ws.URL, testRepo), checkoutDir) runOrFail(t, cloneCmd) // We may have cloned an 'empty' repository, 'git log' will fail in it logCmd := exec.Command("git", "log", "-1", "--oneline") logCmd.Dir = checkoutDir runOrFail(t, logCmd) } func TestDeniedClone(t *testing.T) { // Prepare clone directory if err := os.RemoveAll(scratchDir); err != nil { t.Fatal(err) } // Prepare test server and backend ts := testAuthServer(nil, 403, "Access denied") defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() // Do the git clone cloneCmd := exec.Command("git", "clone", fmt.Sprintf("%s/%s", ws.URL, testRepo), checkoutDir) out, err := cloneCmd.CombinedOutput() t.Logf("%s", out) if err == nil { t.Fatal("git clone should have failed") } } func TestAllowedPush(t *testing.T) { preparePushRepo(t) // Prepare the test server and backend ts := testAuthServer(nil, 200, gitOkBody(t)) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() // Perform the git push pushCmd := exec.Command("git", "push", fmt.Sprintf("%s/%s", ws.URL, testRepo), fmt.Sprintf("master:%s", newBranch())) pushCmd.Dir = checkoutDir runOrFail(t, pushCmd) } func TestDeniedPush(t *testing.T) { preparePushRepo(t) // Prepare the test server and backend ts := testAuthServer(nil, 403, "Access denied") defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() // Perform the git push pushCmd := exec.Command("git", "push", "-v", fmt.Sprintf("%s/%s", ws.URL, testRepo), fmt.Sprintf("master:%s", newBranch())) pushCmd.Dir = checkoutDir out, err := pushCmd.CombinedOutput() t.Logf("%s", out) if err == nil { t.Fatal("git push should have failed") } } func TestAllowedDownloadZip(t *testing.T) { prepareDownloadDir(t) // Prepare test server and backend archiveName := "foobar.zip" ts := testAuthServer(nil, 200, archiveOkBody(t, archiveName)) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() downloadCmd := exec.Command("curl", "-J", "-O", fmt.Sprintf("%s/%s/repository/archive.zip", ws.URL, testProject)) downloadCmd.Dir = scratchDir runOrFail(t, downloadCmd) extractCmd := exec.Command("unzip", archiveName) extractCmd.Dir = scratchDir runOrFail(t, extractCmd) } func TestAllowedDownloadTar(t *testing.T) { 
prepareDownloadDir(t) // Prepare test server and backend archiveName := "foobar.tar" ts := testAuthServer(nil, 200, archiveOkBody(t, archiveName)) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() downloadCmd := exec.Command("curl", "-J", "-O", fmt.Sprintf("%s/%s/repository/archive.tar", ws.URL, testProject)) downloadCmd.Dir = scratchDir runOrFail(t, downloadCmd) extractCmd := exec.Command("tar", "xf", archiveName) extractCmd.Dir = scratchDir runOrFail(t, extractCmd) } func TestAllowedDownloadTarGz(t *testing.T) { prepareDownloadDir(t) // Prepare test server and backend archiveName := "foobar.tar.gz" ts := testAuthServer(nil, 200, archiveOkBody(t, archiveName)) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() downloadCmd := exec.Command("curl", "-J", "-O", fmt.Sprintf("%s/%s/repository/archive.tar.gz", ws.URL, testProject)) downloadCmd.Dir = scratchDir runOrFail(t, downloadCmd) extractCmd := exec.Command("tar", "zxf", archiveName) extractCmd.Dir = scratchDir runOrFail(t, extractCmd) } func TestAllowedDownloadTarBz2(t *testing.T) { prepareDownloadDir(t) // Prepare test server and backend archiveName := "foobar.tar.bz2" ts := testAuthServer(nil, 200, archiveOkBody(t, archiveName)) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() downloadCmd := exec.Command("curl", "-J", "-O", fmt.Sprintf("%s/%s/repository/archive.tar.bz2", ws.URL, testProject)) downloadCmd.Dir = scratchDir runOrFail(t, downloadCmd) extractCmd := exec.Command("tar", "jxf", archiveName) extractCmd.Dir = scratchDir runOrFail(t, extractCmd) } func TestAllowedApiDownloadZip(t *testing.T) { prepareDownloadDir(t) // Prepare test server and backend archiveName := "foobar.zip" ts := testAuthServer(nil, 200, archiveOkBody(t, archiveName)) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() downloadCmd := exec.Command("curl", "-J", "-O", fmt.Sprintf("%s/api/v3/projects/123/repository/archive.zip", ws.URL)) downloadCmd.Dir = scratchDir runOrFail(t, downloadCmd) extractCmd := exec.Command("unzip", archiveName) extractCmd.Dir = scratchDir runOrFail(t, extractCmd) } func TestAllowedApiDownloadZipWithSlash(t *testing.T) { prepareDownloadDir(t) // Prepare test server and backend archiveName := "foobar.zip" ts := testAuthServer(nil, 200, archiveOkBody(t, archiveName)) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() // Use foo%2Fbar instead of a numeric ID downloadCmd := exec.Command("curl", "-J", "-O", fmt.Sprintf("%s/api/v3/projects/foo%%2Fbar/repository/archive.zip", ws.URL)) if !strings.Contains(downloadCmd.Args[3], `projects/foo%2Fbar/repository`) { t.Fatalf("Cannot find percent-2F: %v", downloadCmd.Args) } downloadCmd.Dir = scratchDir runOrFail(t, downloadCmd) extractCmd := exec.Command("unzip", archiveName) extractCmd.Dir = scratchDir runOrFail(t, extractCmd) } func TestDownloadCacheHit(t *testing.T) { prepareDownloadDir(t) // Prepare test server and backend archiveName := "foobar.zip" ts := testAuthServer(nil, 200, archiveOkBody(t, archiveName)) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() if err := os.MkdirAll(cacheDir, 0755); err != nil { t.Fatal(err) } cachedContent := []byte("cached") if err := ioutil.WriteFile(path.Join(cacheDir, archiveName), cachedContent, 0644); err != nil { t.Fatal(err) } downloadCmd := exec.Command("curl", "-J", "-O", fmt.Sprintf("%s/api/v3/projects/123/repository/archive.zip", ws.URL)) downloadCmd.Dir = scratchDir runOrFail(t, downloadCmd) actual, err := 
ioutil.ReadFile(path.Join(scratchDir, archiveName)) if err != nil { t.Fatal(err) } if bytes.Compare(actual, cachedContent) != 0 { t.Fatal("Unexpected file contents in download") } } func TestDownloadCacheCreate(t *testing.T) { prepareDownloadDir(t) // Prepare test server and backend archiveName := "foobar.zip" ts := testAuthServer(nil, 200, archiveOkBody(t, archiveName)) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() downloadCmd := exec.Command("curl", "-J", "-O", fmt.Sprintf("%s/api/v3/projects/123/repository/archive.zip", ws.URL)) downloadCmd.Dir = scratchDir runOrFail(t, downloadCmd) compareCmd := exec.Command("cmp", path.Join(cacheDir, archiveName), path.Join(scratchDir, archiveName)) if err := compareCmd.Run(); err != nil { t.Fatalf("Comparison between downloaded file and cache item failed: %s", err) } } func TestRegularProjectsAPI(t *testing.T) { apiResponse := "API RESPONSE" ts := testAuthServer(nil, 200, apiResponse) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() for _, resource := range []string{ "/api/v3/projects/123/repository/not/special", "/api/v3/projects/foo%2Fbar/repository/not/special", "/api/v3/projects/123/not/special", "/api/v3/projects/foo%2Fbar/not/special", } { resp, err := http.Get(ws.URL + resource) if err != nil { t.Fatal(err) } defer resp.Body.Close() buf := &bytes.Buffer{} if _, err := io.Copy(buf, resp.Body); err != nil { t.Error(err) } if buf.String() != apiResponse { t.Errorf("GET %q: Expected %q, got %q", resource, apiResponse, buf.String()) } if resp.StatusCode != 200 { t.Errorf("GET %q: expected 200, got %d", resource, resp.StatusCode) } } } func TestAllowedXSendfileDownload(t *testing.T) { contentFilename := "my-content" prepareDownloadDir(t) allowedXSendfileDownload(t, contentFilename, "foo/uploads/bar") } func TestDeniedXSendfileDownload(t *testing.T) { contentFilename := "my-content" prepareDownloadDir(t) deniedXSendfileDownload(t, contentFilename, "foo/uploads/bar") } func TestAllowedStaticFile(t *testing.T) { content := "PUBLIC" if err := setupStaticFile("static file.txt", content); err != nil { t.Fatalf("create public/static file.txt: %v", err) } proxied := false ts := testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) { proxied = true w.WriteHeader(404) }) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() for _, resource := range []string{ "/static%20file.txt", "/static file.txt", } { resp, err := http.Get(ws.URL + resource) if err != nil { t.Error(err) } defer resp.Body.Close() buf := &bytes.Buffer{} if _, err := io.Copy(buf, resp.Body); err != nil { t.Error(err) } if buf.String() != content { t.Errorf("GET %q: Expected %q, got %q", resource, content, buf.String()) } if resp.StatusCode != 200 { t.Errorf("GET %q: expected 200, got %d", resource, resp.StatusCode) } if proxied { t.Errorf("GET %q: should not have made it to backend", resource) } } } func TestStaticFileRelativeURL(t *testing.T) { content := "PUBLIC" if err := setupStaticFile("static.txt", content); err != nil { t.Fatalf("create public/static.txt: %v", err) } ts := testhelper.TestServerWithHandler(regexp.MustCompile(`.`), http.HandlerFunc(http.NotFound)) defer ts.Close() backendURLString := ts.URL + "/my-relative-url" log.Print(backendURLString) ws := startWorkhorseServer(backendURLString) defer ws.Close() resource := "/my-relative-url/static.txt" resp, err := http.Get(ws.URL + resource) if err != nil { t.Error(err) } defer resp.Body.Close() buf := &bytes.Buffer{} if _, err 
:= io.Copy(buf, resp.Body); err != nil { t.Error(err) } if buf.String() != content { t.Errorf("GET %q: Expected %q, got %q", resource, content, buf.String()) } if resp.StatusCode != 200 { t.Errorf("GET %q: expected 200, got %d", resource, resp.StatusCode) } } func TestAllowedPublicUploadsFile(t *testing.T) { content := "PRIVATE but allowed" if err := setupStaticFile("uploads/static file.txt", content); err != nil { t.Fatalf("create public/uploads/static file.txt: %v", err) } proxied := false ts := testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) { proxied = true w.Header().Add("X-Sendfile", *documentRoot+r.URL.Path) w.WriteHeader(200) }) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() for _, resource := range []string{ "/uploads/static%20file.txt", "/uploads/static file.txt", } { resp, err := http.Get(ws.URL + resource) if err != nil { t.Fatal(err) } defer resp.Body.Close() buf := &bytes.Buffer{} if _, err := io.Copy(buf, resp.Body); err != nil { t.Fatal(err) } if buf.String() != content { t.Fatalf("GET %q: Expected %q, got %q", resource, content, buf.String()) } if resp.StatusCode != 200 { t.Fatalf("GET %q: expected 200, got %d", resource, resp.StatusCode) } if !proxied { t.Fatalf("GET %q: never made it to backend", resource) } } } func TestDeniedPublicUploadsFile(t *testing.T) { content := "PRIVATE" if err := setupStaticFile("uploads/static.txt", content); err != nil { t.Fatalf("create public/uploads/static.txt: %v", err) } proxied := false ts := testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, _ *http.Request) { proxied = true w.WriteHeader(404) }) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() for _, resource := range []string{ "/uploads/static.txt", "/uploads%2Fstatic.txt", } { resp, err := http.Get(ws.URL + resource) if err != nil { t.Fatal(err) } defer resp.Body.Close() buf := &bytes.Buffer{} if _, err := io.Copy(buf, resp.Body); err != nil { t.Fatal(err) } if buf.String() == content { t.Fatalf("GET %q: Got private file contents which should have been blocked by upstream", resource) } if resp.StatusCode != 404 { t.Fatalf("GET %q: expected 404, got %d", resource, resp.StatusCode) } if !proxied { t.Fatalf("GET %q: never made it to backend", resource) } } } func TestArtifactsUpload(t *testing.T) { reqBody := &bytes.Buffer{} writer := multipart.NewWriter(reqBody) file, err := writer.CreateFormFile("file", "my.file") if err != nil { t.Fatal(err) } fmt.Fprint(file, "SHOULD BE ON DISK, NOT IN MULTIPART") writer.Close() ts := testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) { if strings.HasSuffix(r.URL.Path, "/authorize") { if _, err := fmt.Fprintf(w, `{"TempPath":"%s"}`, scratchDir); err != nil { t.Fatal(err) } return } err := r.ParseMultipartForm(100000) if err != nil { t.Fatal(err) } nValues := 2 // filename + path for just the upload (no metadata because we are not POSTing a valid zip file) if len(r.MultipartForm.Value) != nValues { t.Errorf("Expected to receive exactly %d values", nValues) } if len(r.MultipartForm.File) != 0 { t.Error("Expected to not receive any files") } w.WriteHeader(200) }) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() resource := `/ci/api/v1/builds/123/artifacts` resp, err := http.Post(ws.URL+resource, writer.FormDataContentType(), reqBody) if err != nil { t.Error(err) } defer resp.Body.Close() if resp.StatusCode != 200 { t.Errorf("GET %q: expected 200, got 
%d", resource, resp.StatusCode) } } func TestArtifactsGetSingleFile(t *testing.T) { // We manually created this zip file in the gitlab-workhorse Git repository archivePath := `testdata/artifacts-archive.zip` fileName := "myfile" fileContents := "MY FILE" resourcePath := `/namespace/project/builds/123/artifacts/file/` + fileName ts := testhelper.TestServerWithHandler(regexp.MustCompile(`\A`+resourcePath+`\z`), func(w http.ResponseWriter, r *http.Request) { encodedFilename := base64.StdEncoding.EncodeToString([]byte(fileName)) if _, err := fmt.Fprintf(w, `{"Archive":"%s","Entry":"%s"}`, archivePath, encodedFilename); err != nil { t.Fatal(err) } return }) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() resp, err := http.Get(ws.URL + resourcePath) if err != nil { t.Error(err) } defer resp.Body.Close() if resp.StatusCode != 200 { t.Errorf("GET %q: expected 200, got %d", resourcePath, resp.StatusCode) } body, err := ioutil.ReadAll(resp.Body) if err != nil { t.Fatal(err) } if string(body) != fileContents { t.Fatalf("Expected file contents %q, got %q", fileContents, body) } } func TestGetGitBlob(t *testing.T) { blobId := "50b27c6518be44c42c4d87966ae2481ce895624c" // the LICENSE file in the test repository blobLength := 1075 headerKey := http.CanonicalHeaderKey("Gitlab-Workhorse-Send-Data") ts := testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) { responseJSON := fmt.Sprintf(`{"RepoPath":"%s","BlobId":"%s"}`, path.Join(testRepoRoot, testRepo), blobId) encodedJSON := base64.StdEncoding.EncodeToString([]byte(responseJSON)) w.Header().Set(headerKey, "git-blob:"+encodedJSON) // Prevent the Go HTTP server from setting the Content-Length to 0. w.Header().Set("Transfer-Encoding", "chunked") if _, err := fmt.Fprintf(w, "GNU General Public License"); err != nil { t.Fatal(err) } return }) defer ts.Close() ws := startWorkhorseServer(ts.URL) defer ws.Close() resourcePath := "/something" resp, err := http.Get(ws.URL + resourcePath) if err != nil { t.Error(err) } defer resp.Body.Close() if resp.StatusCode != 200 { t.Errorf("GET %q: expected 200, got %d", resourcePath, resp.StatusCode) } if len(resp.Header[headerKey]) != 0 { t.Fatalf("Unexpected response header: %s: %q", headerKey, resp.Header.Get(headerKey)) } body, err := ioutil.ReadAll(resp.Body) if err != nil { t.Fatal(err) } if len(body) != blobLength { t.Fatalf("Expected body of %d bytes, got %d", blobLength, len(body)) } if !strings.HasPrefix(string(body), "The MIT License (MIT)") { t.Fatalf("Expected MIT license, got %q", body) } } func setupStaticFile(fpath, content string) error { cwd, err := os.Getwd() if err != nil { return err } *documentRoot = path.Join(cwd, testDocumentRoot) if err := os.MkdirAll(path.Join(*documentRoot, path.Dir(fpath)), 0755); err != nil { return err } static_file := path.Join(*documentRoot, fpath) if err := ioutil.WriteFile(static_file, []byte(content), 0666); err != nil { return err } return nil } func prepareDownloadDir(t *testing.T) { if err := os.RemoveAll(scratchDir); err != nil { t.Fatal(err) } if err := os.MkdirAll(scratchDir, 0755); err != nil { t.Fatal(err) } } func preparePushRepo(t *testing.T) { if err := os.RemoveAll(scratchDir); err != nil { t.Fatal(err) } cloneCmd := exec.Command("git", "clone", path.Join(testRepoRoot, testRepo), checkoutDir) runOrFail(t, cloneCmd) } func newBranch() string { return fmt.Sprintf("branch-%d", time.Now().UnixNano()) } func testAuthServer(url *regexp.Regexp, code int, body interface{}) *httptest.Server { return 
func testAuthServer(url *regexp.Regexp, code int, body interface{}) *httptest.Server {
	return testhelper.TestServerWithHandler(url, func(w http.ResponseWriter, r *http.Request) {
		// Write pure string
		if data, ok := body.(string); ok {
			log.Println("UPSTREAM", r.Method, r.URL, code)
			w.WriteHeader(code)
			fmt.Fprint(w, data)
			return
		}

		// Write json string
		data, err := json.Marshal(body)
		if err != nil {
			log.Println("UPSTREAM", r.Method, r.URL, "FAILURE", err)
			w.WriteHeader(503)
			fmt.Fprint(w, err)
			return
		}

		log.Println("UPSTREAM", r.Method, r.URL, code)
		w.WriteHeader(code)
		w.Write(data)
	})
}

func startWorkhorseServer(authBackend string) *httptest.Server {
	u := upstream.NewUpstream(
		helper.URLMustParse(authBackend),
		"",
		"123",
		testDocumentRoot,
		false,
		0,
	)
	return httptest.NewServer(u)
}

func runOrFail(t *testing.T, cmd *exec.Cmd) {
	out, err := cmd.CombinedOutput()
	t.Logf("%s", out)
	if err != nil {
		t.Fatal(err)
	}
}

func gitOkBody(t *testing.T) interface{} {
	return &api.Response{
		GL_ID:    "user-123",
		RepoPath: repoPath(t),
	}
}

func archiveOkBody(t *testing.T, archiveName string) interface{} {
	cwd, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	archivePath := path.Join(cwd, cacheDir, archiveName)

	return &api.Response{
		RepoPath:      repoPath(t),
		ArchivePath:   archivePath,
		CommitId:      "c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd",
		ArchivePrefix: "foobar123",
	}
}

func repoPath(t *testing.T) string {
	cwd, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	return path.Join(cwd, testRepoRoot, testRepo)
}

gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/proxy_test.go

package main

import (
	"./internal/badgateway"
	"./internal/helper"
	"./internal/proxy"
	"./internal/testhelper"
	"bytes"
	"fmt"
	"io"
	"net"
	"net/http"
	"net/http/httptest"
	"regexp"
	"testing"
	"time"
)

func newProxy(url string, rt *badgateway.RoundTripper) *proxy.Proxy {
	return proxy.NewProxy(helper.URLMustParse(url), "123", rt)
}

func TestProxyRequest(t *testing.T) {
	ts := testhelper.TestServerWithHandler(regexp.MustCompile(`/url/path\z`), func(w http.ResponseWriter, r *http.Request) {
		if r.Method != "POST" {
			t.Fatal("Expected POST request")
		}
		if r.Header.Get("Custom-Header") != "test" {
			t.Fatal("Missing custom header")
		}

		var body bytes.Buffer
		io.Copy(&body, r.Body)
		if body.String() != "REQUEST" {
			t.Fatal("Expected REQUEST in request body")
		}

		w.Header().Set("Custom-Response-Header", "test")
		w.WriteHeader(202)
		fmt.Fprint(w, "RESPONSE")
	})

	httpRequest, err := http.NewRequest("POST", ts.URL+"/url/path", bytes.NewBufferString("REQUEST"))
	if err != nil {
		t.Fatal(err)
	}
	httpRequest.Header.Set("Custom-Header", "test")

	w := httptest.NewRecorder()
	newProxy(ts.URL, nil).ServeHTTP(w, httpRequest)
	testhelper.AssertResponseCode(t, w, 202)
	testhelper.AssertResponseBody(t, w, "RESPONSE")

	if w.Header().Get("Custom-Response-Header") != "test" {
		t.Fatal("Expected custom response header")
	}
}

func TestProxyError(t *testing.T) {
	httpRequest, err := http.NewRequest("POST", "/url/path", bytes.NewBufferString("REQUEST"))
	if err != nil {
		t.Fatal(err)
	}
	httpRequest.Header.Set("Custom-Header", "test")

	w := httptest.NewRecorder()
	newProxy("http://localhost:655575/", nil).ServeHTTP(w, httpRequest)
	testhelper.AssertResponseCode(t, w, 502)
	testhelper.AssertResponseBody(t, w, "dial tcp: invalid port 655575")
}
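// TestProxyReadTimeout points the proxy at an upstream that sleeps for a full
// minute while the RoundTripper below allows only a one-millisecond
// ResponseHeaderTimeout, so the proxy is expected to give up and answer 502.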
func TestProxyReadTimeout(t *testing.T) {
	ts := testhelper.TestServerWithHandler(nil, func(w http.ResponseWriter, r *http.Request) {
		time.Sleep(time.Minute)
	})

	httpRequest, err := http.NewRequest("POST", "http://localhost/url/path", nil)
	if err != nil {
		t.Fatal(err)
	}

	rt := &badgateway.RoundTripper{
		Transport: &http.Transport{
			Proxy: http.ProxyFromEnvironment,
			Dial: (&net.Dialer{
				Timeout:   30 * time.Second,
				KeepAlive: 30 * time.Second,
			}).Dial,
			TLSHandshakeTimeout:   10 * time.Second,
			ResponseHeaderTimeout: time.Millisecond,
		},
	}

	p := newProxy(ts.URL, rt)
	w := httptest.NewRecorder()
	p.ServeHTTP(w, httpRequest)
	testhelper.AssertResponseCode(t, w, 502)
	testhelper.AssertResponseBody(t, w, "net/http: timeout awaiting response headers")
}

func TestProxyHandlerTimeout(t *testing.T) {
	ts := testhelper.TestServerWithHandler(nil,
		http.TimeoutHandler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			time.Sleep(time.Second)
		}), time.Millisecond, "Request took too long").ServeHTTP,
	)

	httpRequest, err := http.NewRequest("POST", "http://localhost/url/path", nil)
	if err != nil {
		t.Fatal(err)
	}

	w := httptest.NewRecorder()
	newProxy(ts.URL, nil).ServeHTTP(w, httpRequest)
	testhelper.AssertResponseCode(t, w, 503)
	testhelper.AssertResponseBody(t, w, "Request took too long")
}

gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/sendfile_test.go

package main

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"net/http/httptest"
	"os"
	"os/exec"
	"path"
	"testing"
)

func TestDeniedLfsDownload(t *testing.T) {
	contentFilename := "b68143e6463773b1b6c6fd009a76c32aeec041faff32ba2ed42fd7f708a17f80"
	url := fmt.Sprintf("gitlab-lfs/objects/%s", contentFilename)

	prepareDownloadDir(t)
	deniedXSendfileDownload(t, contentFilename, url)
}

func TestAllowedLfsDownload(t *testing.T) {
	contentFilename := "b68143e6463773b1b6c6fd009a76c32aeec041faff32ba2ed42fd7f708a17f80"
	url := fmt.Sprintf("gitlab-lfs/objects/%s", contentFilename)

	prepareDownloadDir(t)
	allowedXSendfileDownload(t, contentFilename, url)
}

func allowedXSendfileDownload(t *testing.T, contentFilename string, filePath string) {
	contentPath := path.Join(cacheDir, contentFilename)
	prepareDownloadDir(t)

	// Prepare test server and backend
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		log.Println("UPSTREAM", r.Method, r.URL)
		if xSendfileType := r.Header.Get("X-Sendfile-Type"); xSendfileType != "X-Sendfile" {
			t.Fatalf(`X-Sendfile-Type want "X-Sendfile" got %q`, xSendfileType)
		}
		w.Header().Set("X-Sendfile", contentPath)
		w.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, contentFilename))
		w.Header().Set("Content-Type", "application/octet-stream")
		w.WriteHeader(200)
	}))
	defer ts.Close()
	ws := startWorkhorseServer(ts.URL)
	defer ws.Close()

	if err := os.MkdirAll(cacheDir, 0755); err != nil {
		t.Fatal(err)
	}
	contentBytes := []byte("content")
	if err := ioutil.WriteFile(contentPath, contentBytes, 0644); err != nil {
		t.Fatal(err)
	}

	downloadCmd := exec.Command("curl", "-J", "-O", fmt.Sprintf("%s/%s", ws.URL, filePath))
	downloadCmd.Dir = scratchDir
	runOrFail(t, downloadCmd)

	actual, err := ioutil.ReadFile(path.Join(scratchDir, contentFilename))
	if err != nil {
		t.Fatal(err)
	}
	if !bytes.Equal(actual, contentBytes) {
		t.Fatal("Unexpected file contents in download")
	}
}
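// deniedXSendfileDownload simulates an upstream that refuses the download: it
// never sets an X-Sendfile header, so the client must receive the upstream's
// own "Denied" body rather than a file from the cache directory.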
func deniedXSendfileDownload(t *testing.T, contentFilename string, filePath string) {
	prepareDownloadDir(t)

	// Prepare test server and backend
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		log.Println("UPSTREAM", r.Method, r.URL)
		if xSendfileType := r.Header.Get("X-Sendfile-Type"); xSendfileType != "X-Sendfile" {
			t.Fatalf(`X-Sendfile-Type want "X-Sendfile" got %q`, xSendfileType)
		}
		w.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, contentFilename))
		w.WriteHeader(200)
		fmt.Fprint(w, "Denied")
	}))
	defer ts.Close()
	ws := startWorkhorseServer(ts.URL)
	defer ws.Close()

	downloadCmd := exec.Command("curl", "-J", "-O", fmt.Sprintf("%s/%s", ws.URL, filePath))
	downloadCmd.Dir = scratchDir
	runOrFail(t, downloadCmd)

	actual, err := ioutil.ReadFile(path.Join(scratchDir, contentFilename))
	if err != nil {
		t.Fatal(err)
	}
	if !bytes.Equal(actual, []byte("Denied")) {
		t.Fatal("Unexpected file contents in download")
	}
}

gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/support/
gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/support/fake-auth-backend.go

package main

import (
	"fmt"
	"log"
	"net/http"
	"os"
)

func main() {
	if len(os.Args) == 1 {
		fmt.Fprintf(os.Stderr, "Usage: %s /path/to/test-repo.git\n", os.Args[0])
		os.Exit(1)
	}

	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintf(w, `{"RepoPath":"%s","ArchivePath":"%s"}`, os.Args[1], r.URL.Path)
	})

	log.Fatal(http.ListenAndServe("localhost:8080", nil))
}

gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/support/path

#!/bin/sh
exec env PATH=$(pwd):${PATH} "$@"

gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/testdata/
gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/testdata/.gitkeep
gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/testdata/artifacts-archive.zip
(binary ZIP fixture containing a single "myfile" entry, used by TestArtifactsGetSingleFile; raw bytes omitted)

gitlab-workhorse-0.6.3-fb0deba81299ab345b6de447113139ce6bf5982b/urlflag.go

package main

import (
	"flag"
	"net/url"
)

type urlFlag struct {
	*url.URL
}

func (u *urlFlag) Set(s string) error {
	myURL, err := url.Parse(s)
	if err != nil {
		return err
	}
	u.URL = myURL
	return nil
}

func URLFlag(name string, value *url.URL, usage string) **url.URL {
	f := &urlFlag{value}
	flag.CommandLine.Var(f, name, usage)
	return &f.URL
}
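// A minimal usage sketch for URLFlag (illustrative only: the flag name,
// default value, and log output below are assumptions for the example, not
// code taken from gitlab-workhorse's own main package):
//
//	var authBackend = URLFlag("authBackend", helper.URLMustParse("http://localhost:8080"), "Authentication/authorization backend")
//
//	func main() {
//		flag.Parse()
//		// *authBackend is the *url.URL that urlFlag.Set parsed from the command line,
//		// or the default passed above if the flag was not given.
//		log.Printf("proxying to auth backend %v", *authBackend)
//	}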