douchui1657 2018-02-10 01:17
Viewed 76 times
Accepted

Golang cookie timestamp is empty

I'm writing a Go program that needs to log in to a website to get some data. The login process works, but now I have the problem that I don't get access to the protected page with the cookies I got from the login form. After examining them and comparing them to the ones my browser gets, I noticed that my program gets cookies with "empty" timestamps. Can someone point out how I can get the cookie with a correct timestamp? That would be fantastic.

This is my code:

package main

import (
    "fmt"
    "html"
    "io/ioutil"
    "log"
    "net/http"
    "net/http/cookiejar"
    "net/url"
    "regexp"
    "strings"
    "time"
)

var CookieJar *cookiejar.Jar
var httpClient *http.Client

func dbgPrintCurCookies(CurCookies []*http.Cookie) {
    var cookieNum int = len(CurCookies)
    log.Printf("cookieNum=%d", cookieNum)
    for i := 0; i < cookieNum; i++ {
        var curCk *http.Cookie = CurCookies[i]
        //log.Printf("curCk.Raw=%s", curCk.Raw)
        log.Printf("Cookie [%d]", i)
        log.Printf("Name\t=%s", curCk.Name)
        log.Printf("Value\t=%s", curCk.Value)
        log.Printf("Path\t=%s", curCk.Path)
        log.Printf("Domain\t=%s", curCk.Domain)
        log.Printf("Expires\t=%s", curCk.Expires)
        log.Printf("RawExpires=%s", curCk.RawExpires)
        log.Printf("MaxAge\t=%d", curCk.MaxAge)
        log.Printf("Secure\t=%t", curCk.Secure)
        log.Printf("HttpOnly=%t", curCk.HttpOnly)
        log.Printf("Raw\t=%s", curCk.Raw)
        log.Printf("Unparsed=%s", curCk.Unparsed)
    }
}

func main() {
    CookieJar, _ = cookiejar.New(nil)

    httpClient = &http.Client{
        Jar:     CookieJar,
        Timeout: 10 * time.Second,
        CheckRedirect: func(req *http.Request, via []*http.Request) error {
            return http.ErrUseLastResponse
        }}

    LSFRedirURL := ""
    pw := "
    us := ""
    LoginURL := "login website ?j_password=" + pw + "&j_username=" + us
    GetFinalCookieURL := ""



    //get first cookie
    nextURL := LSFRedirURL
    for i := 0; i < 10; i++ {
        resp, _ := httpClient.Get(nextURL)
        // fmt.Println("StatusCode:", resp.StatusCode)
        // fmt.Println(resp.Request.URL)
        if resp.StatusCode == 200 {
            // fmt.Println("Done!")
            break
        } else {
            nextURL = resp.Header.Get("Location")
        }
    }

    //save first cookie
    url1, _ := url.Parse("first cookie website")
    firstCookie := CookieJar.Cookies(url1)[0]
    fmt.Println("First Cookie :)")

    //getting second cookie and params
    // var cam []string
    var resp *http.Response
    nextURL = LoginURL
    for i := 0; i < 10; i++ {
        resp, _ = httpClient.Post(nextURL, "", nil)
        // fmt.Println("StatusCode:", resp.StatusCode)
        // fmt.Println(resp.Request.URL)
        // cam = append(cam, nextURL)
        if resp.StatusCode == 200 {
            fmt.Println("Done!")
            break
        } else {
            nextURL = resp.Header.Get("Location")
        }
    }

    //second cookie
    url2, _ := url.Parse("website second cookie is from")
    secondCookie := CookieJar.Cookies(url2)[0]
    fmt.Println("Second Cookie :)")

    //params
    defer resp.Body.Close()
    c, _ := ioutil.ReadAll(resp.Body)
    data := html.UnescapeString(string(c))
    //fmt.Println(data)
    getvalue := regexp.MustCompile("value=\".*\"")
    values := getvalue.FindAllStringSubmatch(data, -1)
    values[0][0] = strings.TrimSuffix(values[0][0], "\"")
    values[0][0] = strings.TrimPrefix(values[0][0], "value=\"")
    values[1][0] = strings.TrimSuffix(values[1][0], "\"")
    values[1][0] = strings.TrimPrefix(values[1][0], "value=\"")

    v := url.Values{
        "SAMLResponse": {values[1][0]},
        "RelayState":   {values[0][0]},
    }

    body := strings.NewReader(v.Encode())

    fmt.Println("Values :)")

    //adding values and cookies to request
    req, _ := http.NewRequest("POST", GetFinalCookieURL, body)
    req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
    req.AddCookie(firstCookie)
    req.AddCookie(secondCookie)
    resp, _ = httpClient.Do(req)

    //we got the real cookie
    url3, _ := url.Parse("website i get the cookies for")
    dbgPrintCurCookies(CookieJar.Cookies(url3))
    finalCookie := CookieJar.Cookies(url3)[0]
    finalCookie2 := CookieJar.Cookies(url3)[1]



    fmt.Println("StatusCode:", resp.StatusCode)
    fmt.Println(resp.Request.URL)
    nextURL = resp.Header.Get("Location")
    fmt.Println(nextURL)

    nextURL = "website i need the cookies for"

    req, _ = http.NewRequest("GET", nextURL, nil)
    req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
    req.AddCookie(finalCookie)
    req.AddCookie(finalCookie2)
    resp, _ = httpClient.Do(req)
    url3, _ = url.Parse("final cookie website")
    dbgPrintCurCookies(CookieJar.Cookies(url3))
    fmt.Println(resp.StatusCode)
    fmt.Println(resp.Request.URL)

    defer resp.Body.Close()
    data3, _ := ioutil.ReadAll(resp.Body)
    fmt.Println(string(data3))

}

And this is an example of what the cookie I get looks like:

2018/02/10 01:55:48 cookieNum=2
2018/02/10 01:55:48 Cookie [0]
2018/02/10 01:55:48 Name    =JSESSIONID
2018/02/10 01:55:48 Value   =86E2C361905167A1F64FC45C400649F2.stupo1
2018/02/10 01:55:48 Path    =
2018/02/10 01:55:48 Domain  =
2018/02/10 01:55:48 Expires =0001-01-01 00:00:00 +0000 UTC
2018/02/10 01:55:48 RawExpires=
2018/02/10 01:55:48 MaxAge  =0
2018/02/10 01:55:48 Secure  =false
2018/02/10 01:55:48 HttpOnly=false
2018/02/10 01:55:48 Raw =
2018/02/10 01:55:48 Unparsed=[]

Edit: Added the complete code. And this is what the cookie looks like in the browser: [screenshot: Cookie in Browser]


1 Answer

  • duananyu9231 2018-02-10 09:22

    Cookies come in two flavors: the ones you receive in a Set-Cookie header and the ones you send back in a Cookie header. Only Set-Cookie cookies carry an expiration time and the other attributes; Cookie-header cookies are plain name/value pairs, and that is what a cookie jar returns, because that is all that is included in the Cookie header.
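
    This is not part of the original answer, but a minimal sketch of that difference, assuming a made-up httptest server and a dummy JSESSIONID cookie: resp.Cookies(), which is parsed from the incoming Set-Cookie headers of that one response, still carries Expires, while the jar only hands back name/value pairs.

package main

import (
    "fmt"
    "net/http"
    "net/http/cookiejar"
    "net/http/httptest"
    "net/url"
    "time"
)

func main() {
    // Made-up test server that answers with a Set-Cookie header carrying
    // an expiration date, standing in for the real login site.
    srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        http.SetCookie(w, &http.Cookie{
            Name:    "JSESSIONID",
            Value:   "dummy",
            Path:    "/",
            Expires: time.Now().Add(24 * time.Hour),
        })
    }))
    defer srv.Close()

    jar, _ := cookiejar.New(nil)
    client := &http.Client{Jar: jar}

    resp, err := client.Get(srv.URL)
    if err != nil {
        panic(err)
    }
    resp.Body.Close()

    // resp.Cookies() is parsed from the Set-Cookie headers of this response,
    // so Expires, Path, etc. are populated here.
    for _, c := range resp.Cookies() {
        fmt.Println("from Set-Cookie:", c.Name, c.Value, c.Expires)
    }

    // The jar only hands back the name/value pairs it would put into a
    // Cookie request header; Expires is the zero time here.
    u, _ := url.Parse(srv.URL)
    for _, c := range jar.Cookies(u) {
        fmt.Println("from jar:       ", c.Name, c.Value, c.Expires)
    }
}

    Running this prints the real expiration time for the first line and the zero time (0001-01-01 00:00:00 +0000 UTC) for the second, which matches the output shown in the question.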

    The default cookie jar implementation in the standard library does not provide a mechanism to extract all cookies, or any fields other than name and value. If you need that information, use a different open-source cookie jar implementation.

    (Note that the whole purpose of the standard library's cookie jar is to handle cookies transparently, even across redirects. It is not a tool for collecting information about incoming Set-Cookie headers and the values sent in them.)
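
    If the goal is just to see the expiration data during a login flow like the one in the question, one workaround (my own sketch, not something this answer prescribes) is to keep the manual redirect loop from the question and record resp.Cookies() at every hop; those cookies are parsed from the Set-Cookie headers and still have Expires/Max-Age set. The function name, the placeholder URL, and the redirect limit below are illustrative.

package main

import (
    "fmt"
    "net/http"
    "net/http/cookiejar"
)

// collectSetCookies follows redirects by hand, like the loops in the question
// (the client is assumed to return http.ErrUseLastResponse from CheckRedirect),
// and gathers the cookies parsed from the Set-Cookie headers of every hop.
func collectSetCookies(client *http.Client, startURL string) ([]*http.Cookie, error) {
    var all []*http.Cookie
    next := startURL
    for i := 0; i < 10; i++ { // same arbitrary redirect limit as the question
        resp, err := client.Get(next)
        if err != nil {
            return nil, err
        }
        all = append(all, resp.Cookies()...) // this hop's Set-Cookie headers
        resp.Body.Close()

        loc, err := resp.Location() // http.ErrNoLocation => final page reached
        if err != nil {
            return all, nil
        }
        next = loc.String()
    }
    return all, nil
}

func main() {
    jar, _ := cookiejar.New(nil)
    client := &http.Client{
        Jar: jar, // the jar still resends cookies transparently
        CheckRedirect: func(req *http.Request, via []*http.Request) error {
            return http.ErrUseLastResponse // hand each redirect back to the caller
        },
    }
    // "https://example.com/" is a placeholder for the real login URL.
    cookies, err := collectSetCookies(client, "https://example.com/")
    if err != nil {
        fmt.Println("request failed:", err)
        return
    }
    for _, c := range cookies {
        fmt.Println(c.Name, c.Value, "expires:", c.Expires, "max-age:", c.MaxAge)
    }
}

    The jar attached to the client still handles resending the cookies automatically; collecting them here is only for inspecting the expiration data.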

    Accepted by the asker as the best answer.
