 // Licensed under the GNU Affero General Public License (AGPL).
 // See License.AGPL.txt in the project root for license information.
 
-package workspacedownload
+package frontend_dev
 
 import (
+	"bytes"
 	"fmt"
+	"io"
 	"net/http"
 	"net/http/httputil"
 	"net/url"
 	"os"
+	"regexp"
 	"strings"
 
 	"github.com/caddyserver/caddy/v2"
@@ -60,29 +63,88 @@ func (m Config) ServeHTTP(w http.ResponseWriter, r *http.Request, next caddyhttp
 		return caddyhttp.Error(http.StatusInternalServerError, fmt.Errorf("unexpected error forwarding to dev URL"))
 	}
 
-	targetQuery := devURL.RawQuery
-	director := func(req *http.Request) {
-		req.URL.Scheme = devURL.Scheme
-		req.URL.Host = devURL.Host
-		req.Host = devURL.Host // override host header so target proxy can handle this request properly
-
-		req.URL.Path, req.URL.RawPath = joinURLPath(devURL, req.URL)
-		if targetQuery == "" || req.URL.RawQuery == "" {
-			req.URL.RawQuery = targetQuery + req.URL.RawQuery
-		} else {
-			req.URL.RawQuery = targetQuery + "&" + req.URL.RawQuery
-		}
-		if _, ok := req.Header["User-Agent"]; !ok {
-			// explicitly disable User-Agent so it's not set to default value
-			req.Header.Set("User-Agent", "")
-		}
-	}
-	proxy := httputil.ReverseProxy{Director: director}
+	// targetQuery := devURL.RawQuery
+	// director := func(req *http.Request) {
+	// 	req.URL.Scheme = devURL.Scheme
+	// 	req.URL.Host = devURL.Host
+	// 	req.Host = devURL.Host // override host header so target proxy can handle this request properly
+
+	// 	req.URL.Path, req.URL.RawPath = joinURLPath(devURL, req.URL)
+	// 	if targetQuery == "" || req.URL.RawQuery == "" {
+	// 		req.URL.RawQuery = targetQuery + req.URL.RawQuery
+	// 	} else {
+	// 		req.URL.RawQuery = targetQuery + "&" + req.URL.RawQuery
+	// 	}
+	// 	if _, ok := req.Header["User-Agent"]; !ok {
+	// 		// explicitly disable User-Agent so it's not set to default value
+	// 		req.Header.Set("User-Agent", "")
+	// 	}
+	// }
+	proxy := httputil.ReverseProxy{
+		// ReverseProxy still needs a Director to aim the outgoing request at the
+		// dev server; the response rewriting happens in RedirectingTransport below.
+		Director: func(req *http.Request) {
+			req.URL.Scheme = devURL.Scheme
+			req.URL.Host = devURL.Host
+			req.Host = devURL.Host // override host header so target proxy can handle this request properly
+			req.URL.Path, req.URL.RawPath = joinURLPath(devURL, req.URL)
+		},
+		Transport: &RedirectingTransport{baseUrl: devURL},
+	}
 	proxy.ServeHTTP(w, r)
 
 	return nil
 }
 
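+// RedirectingTransport executes the proxied request with http.DefaultTransport
+// and, when the dev server answers with the compiled index.html, rewrites the
+// asset references in that document so they resolve against the dev server.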
+type RedirectingTransport struct {
+	baseUrl *url.URL
+}
+
+func (rt *RedirectingTransport) RoundTrip(req *http.Request) (*http.Response, error) {
+	resp, err := http.DefaultTransport.RoundTrip(req)
+	if err != nil {
+		return nil, err
+	}
+
+	// gpl: Do we have better means to avoid checking the body?
+	if resp.StatusCode < 300 {
+		modifiedResp := MatchAndRewriteRootRequest(resp, rt.baseUrl)
+		if modifiedResp != nil {
+			return modifiedResp, nil
+		}
+	}
+
+	return resp, nil
+}
+
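+// MatchAndRewriteRootRequest checks whether the response body starts with
+// "<!doctype html>", i.e. looks like the compiled index.html. If it does, the
+// response is returned with its main.js, stylesheet and href references
+// rewritten against baseUrl; otherwise nil is returned and the response is
+// forwarded untouched.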
+func MatchAndRewriteRootRequest(or *http.Response, baseUrl *url.URL) *http.Response {
+	// match index.html?
+	prefix := []byte("<!doctype html>")
+	var buf bytes.Buffer
+	bodyReader := io.TeeReader(or.Body, &buf)
+	prefixBuf := make([]byte, len(prefix))
+	_, err := io.ReadAtLeast(bodyReader, prefixBuf, len(prefix))
+	if err != nil {
+		caddy.Log().Sugar().Warnf("prefix match: can't read response body: %v", err)
+		or.Body = io.NopCloser(io.MultiReader(&buf, or.Body))
+		return nil
+	}
+	if !bytes.Equal(prefix, prefixBuf) {
+		// not index.html: re-attach the prefix bytes we already consumed so the
+		// response passes through unmodified
+		or.Body = io.NopCloser(io.MultiReader(&buf, or.Body))
+		return nil
+	}
+
+	caddy.Log().Sugar().Infof("match index.html")
+	_, err = io.Copy(&buf, or.Body)
+	if err != nil {
+		caddy.Log().Sugar().Errorf("unable to copy response body: %v, path: %s", err, or.Request.URL.Path)
+		return nil
+	}
+	fullBody := buf.String()
+
+	// rewrite the fingerprinted main.<hash>.js reference to the dev server's main.js
+	mainJs := regexp.MustCompile(`"[^"]+?main\.[0-9a-z]+\.js"`)
+	fullBody = mainJs.ReplaceAllStringFunc(fullBody, func(s string) string {
+		return fmt.Sprintf(`"%s/static/js/main.js"`, baseUrl.String())
+	})
+
+	// strip <link rel="stylesheet"> tags from the built index.html
+	mainCss := regexp.MustCompile(`<link[^>]+?rel="stylesheet">`)
+	fullBody = mainCss.ReplaceAllString(fullBody, "")
+
+	// make root-relative hrefs absolute against the dev server
+	hrefs := regexp.MustCompile(`href="/`)
+	fullBody = hrefs.ReplaceAllString(fullBody, fmt.Sprintf(`href="%s/`, baseUrl.String()))
+
+	or.Body = io.NopCloser(strings.NewReader(fullBody))
+	return or
+}
+
 func joinURLPath(a, b *url.URL) (path, rawpath string) {
 	if a.RawPath == "" && b.RawPath == "" {
 		return singleJoiningSlash(a.Path, b.Path), ""
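The response-rewriting helper is self-contained enough to exercise without Caddy or a running dev server. Below is a rough unit-test sketch against MatchAndRewriteRootRequest as defined above; the file name, the HTML fixture, and the http://localhost:3000 dev URL are invented for illustration and are not part of this commit:

```go
// frontend_dev_test.go (hypothetical)
package frontend_dev

import (
	"io"
	"net/http"
	"net/http/httptest"
	"net/url"
	"strings"
	"testing"
)

func TestMatchAndRewriteRootRequest(t *testing.T) {
	// minimal stand-in for the compiled index.html; only the markers the
	// rewrite regexes look for matter here
	body := `<!doctype html><html><head>` +
		`<link href="/static/css/main.1234abcd.css" rel="stylesheet">` +
		`<script defer src="/static/js/main.5678ef90.js"></script>` +
		`<link href="/manifest.json" rel="manifest">` +
		`</head><body></body></html>`

	devURL, err := url.Parse("http://localhost:3000")
	if err != nil {
		t.Fatal(err)
	}
	resp := &http.Response{
		StatusCode: http.StatusOK,
		Body:       io.NopCloser(strings.NewReader(body)),
		Request:    httptest.NewRequest(http.MethodGet, "/", nil),
	}

	rewritten := MatchAndRewriteRootRequest(resp, devURL)
	if rewritten == nil {
		t.Fatal("expected index.html response to be rewritten")
	}
	out, err := io.ReadAll(rewritten.Body)
	if err != nil {
		t.Fatal(err)
	}
	if !strings.Contains(string(out), `src="http://localhost:3000/static/js/main.js"`) {
		t.Errorf("main.js reference was not rewritten: %s", out)
	}
	if strings.Contains(string(out), `rel="stylesheet"`) {
		t.Errorf("stylesheet link should have been stripped: %s", out)
	}
}
```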