Example No. 1
// GetEpisodeInfo scrapes the episode page and the standard-config XML, filling in the episode's metadata and RTMP info
func (episode *CrunchyrollEpisode) GetEpisodeInfo(quality string, cookies []*http.Cookie) error {
	episode.Quality = quality // Sets the quality to the passed quality string

	// Gets the HTML of the episode page
	episodeReqHeaders := http.Header{}
	episodeReqHeaders.Add("referer", "http://www.crunchyroll.com/"+strings.Split(episode.Path, "/")[1])
	episodeResponse, err := anirip.GetHTTPResponse("GET",
		episode.URL,
		nil,
		episodeReqHeaders,
		cookies)
	if err != nil {
		return err
	}

	// Creates the goquery document that will be used to scrape for episode metadata
	episodeDoc, err := goquery.NewDocumentFromResponse(episodeResponse)
	if err != nil {
		return anirip.Error{Message: "There was an error while reading the episode doc", Err: err}
	}

	// Scrapes the episode metadata from the episode page
	episodeMetaDataJSON := episodeDoc.Find("script#liftigniter-metadata").First().Text()

	// Parses the metadata json to a MetaData object
	episodeMetaData := new(EpisodeMetaData)
	if err := json.Unmarshal([]byte(episodeMetaDataJSON), episodeMetaData); err != nil {
		return anirip.Error{Message: "There was an error while parsing episode metadata", Err: err}
	}

	// Form data indicating the source page
	formData := url.Values{
		"current_page": {episode.URL},
	}

	// Query string for requesting the Crunchyroll standard config
	queryString := url.Values{
		"req":           {"RpcApiVideoPlayer_GetStandardConfig"},
		"media_id":      {strconv.Itoa(episode.ID)},
		"video_format":  {getVideoFormat(episode.Quality)},
		"video_quality": {getVideoQuality(episode.Quality)},
		"auto_play":     {"1"},
		"aff":           {"crunchyroll-website"},
		"show_pop_out_controls":   {"1"},
		"pop_out_disable_message": {""},
		"click_through":           {"0"},
	}

	// Performs the HTTP Request that will get the XML
	standardConfigReqHeaders := http.Header{}
	standardConfigReqHeaders.Add("Host", "www.crunchyroll.com")
	standardConfigReqHeaders.Add("Origin", "http://static.ak.crunchyroll.com")
	standardConfigReqHeaders.Add("Content-type", "application/x-www-form-urlencoded")
	standardConfigReqHeaders.Add("Referer", "http://static.ak.crunchyroll.com/versioned_assets/StandardVideoPlayer.f3770232.swf")
	standardConfigReqHeaders.Add("X-Requested-With", "ShockwaveFlash/22.0.0.192")
	standardConfigResponse, err := anirip.GetHTTPResponse("POST",
		"http://www.crunchyroll.com/xml/?"+queryString.Encode(),
		bytes.NewBufferString(formData.Encode()),
		standardConfigReqHeaders,
		cookies)
	if err != nil {
		return err
	}

	// Gets the XML string from the received XML response body
	standardConfigResponseBody, err := ioutil.ReadAll(standardConfigResponse.Body)
	if err != nil {
		return anirip.Error{Message: "There was an error reading the xml response", Err: err}
	}
	xmlString := string(standardConfigResponseBody)

	// Checks for an unsupported region first
	if strings.Contains(xmlString, "<code>") && strings.Contains(xmlString, "</code>") {
		if strings.SplitN(strings.SplitN(xmlString, "<code>", 2)[1], "</code>", 2)[0] == "4" {
			return anirip.Error{Message: "This video is not available in your region", Err: err}
		}
	}

	// Performs some basic string parsing to extract the host URL
	xmlHostURL := ""
	if strings.Contains(xmlString, "<host>") && strings.Contains(xmlString, "</host>") {
		xmlHostURL = strings.SplitN(strings.SplitN(xmlString, "<host>", 2)[1], "</host>", 2)[0]
	} else {
		return anirip.Error{Message: "No hosts were found for the episode", Err: err}
	}

	// Same type of xml parsing to get the file
	episodeFile := ""
	if strings.Contains(xmlString, "<file>") && strings.Contains(xmlString, "</file>") {
		episodeFile = strings.SplitN(strings.SplitN(xmlString, "<file>", 2)[1], "</file>", 2)[0]
	} else {
		return anirip.Error{Message: "No hosts were found for the episode", Err: err}
	}

	// Parses the host URL in order to break out the two URLs required for dumping
	hostURL, err := url.Parse(xmlHostURL)
	if err != nil {
		return anirip.Error{Message: "There was an error parsing episode information", Err: err}
	}

	// Sets the RTMP info received before returning
	episode.Title = episodeMetaData.Name
	episode.FileName = anirip.CleanFileName(episode.FileName + episode.Title) // Updates filename with title that we just scraped
	episode.MediaInfo = RTMPInfo{
		File:   episodeFile,
		URLOne: hostURL.Scheme + "://" + hostURL.Host + hostURL.Path,
		URLTwo: strings.Trim(hostURL.RequestURI(), "/"),
	}
	return nil
}
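
A minimal caller sketch for the method above, assuming it lives in the same package as CrunchyrollEpisode (with fmt and net/http imported) and that `cookies` came from an earlier login step; every literal value below is a hypothetical placeholder.

// printEpisodeInfo is a hypothetical helper showing how GetEpisodeInfo might be called.
func printEpisodeInfo(cookies []*http.Cookie) error {
	episode := &CrunchyrollEpisode{
		ID:       123456,                           // placeholder media ID
		Path:     "/example-show/episode-1-000000", // placeholder path
		URL:      "http://www.crunchyroll.com/example-show/episode-1-000000",
		FileName: "example-show - 01 - ",
	}
	if err := episode.GetEpisodeInfo("1080p", cookies); err != nil {
		return err
	}
	// On success the title and RTMP endpoints are populated.
	fmt.Printf("%s\n  rtmp: %s\n  file: %s\n", episode.Title, episode.MediaInfo.URLOne, episode.MediaInfo.File)
	return nil
}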
Example No. 2
// GetEpisodeInfo scrapes the episode page and decrypts the init response, filling in the episode's metadata and stream info
func (episode *DaisukiEpisode) GetEpisodeInfo(quality string, cookies []*http.Cookie) error {
	episode.Quality = quality // Sets the quality to the passed quality string

	// Gets the HTML of the episode page
	episodeReqHeaders := http.Header{}
	episodeReqHeaders.Add("referer", "http://www.daisuki.net/us/en/anime/detail."+strings.Split(episode.Path, ".")[1]+".html")
	episodeResponse, err := anirip.GetHTTPResponse("GET",
		episode.URL,
		nil,
		episodeReqHeaders,
		cookies)
	if err != nil {
		return err
	}

	// Creates the goquery document that will be used to scrape for episode info
	episodeDoc, err := goquery.NewDocumentFromResponse(episodeResponse)
	if err != nil {
		return anirip.Error{Message: "There was an error while reading the episode doc", Err: err}
	}

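	// Scrapes the player flashvars from the inline script in each div#movieFlash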
	flashVars := map[string]string{}
	episodeDoc.Find("div#movieFlash").Each(func(i int, flash *goquery.Selection) {
		// Finds the movie/large episode and adds it to our map
		flashVarString := flash.Find("script").First().Text()
		flashVarString = strings.SplitN(flashVarString, "}", 2)[0]
		flashVarString = strings.SplitN(flashVarString, "{", 2)[1]
		flashVarString = strings.Replace(flashVarString, "'", "\"", -1)
		flashVarString = strings.Replace(flashVarString, " ", "", -1)
		flashVarString = strings.Replace(flashVarString, "\n", "", -1)
		flashVarsArray := strings.Split(flashVarString, "\"")
		newFlashVarsArray := []string{}
		for f := 1; f < len(flashVarsArray)-1; f++ {
			if !strings.Contains(flashVarsArray[f], ":") && !strings.Contains(flashVarsArray[f], ",") {
				newFlashVarsArray = append(newFlashVarsArray, flashVarsArray[f])
			}
		}
		// Fills our map with the key/value pairs parsed from flashvars
		for e := 0; e+1 < len(newFlashVarsArray); e += 2 {
			flashVars[newFlashVarsArray[e]] = newFlashVarsArray[e+1]
		}
	})

	// Check for required fields in flashVars map
	if _, ok := flashVars["s"]; !ok {
		return anirip.Error{Message: "'s' was missing from flashvars", Err: err}
	}
	if _, ok := flashVars["country"]; !ok {
		return anirip.Error{Message: "'country' was missing from flashvars", Err: err}
	}
	if _, ok := flashVars["init"]; !ok {
		return anirip.Error{Message: "'init' was missing from flashvars", Err: err}
	}

	// Gets the current time which we will use in our flashvars request
	nowMillis := strconv.FormatInt(time.Now().UnixNano()/1000000, 10)

	// Performs the HTTP Request that will get the country code
	countryReqHeaders := http.Header{}
	countryReqHeaders.Add("referer", "https://www.crunchyroll.com/login")
	countryReqHeaders.Add("content-type", "application/x-www-form-urlencoded")
	countryResponse, err := anirip.GetHTTPResponse("GET",
		"http://www.daisuki.net"+flashVars["country"]+"?cashPath="+nowMillis,
		nil,
		countryReqHeaders,
		cookies)
	if err != nil {
		return err
	}

	// Reads the body and extracts the country code which will be used in later requests
	body, err := ioutil.ReadAll(countryResponse.Body)
	if err != nil {
		return anirip.Error{Message: "There was an error getting the country code", Err: err}
	}
	countryCode := strings.Split(strings.Split(string(body), "<country_code>")[1], "</country_code>")[0]

	// Generates a new random 256-bit AES key
	key := make([]byte, 32)
	if _, err = io.ReadFull(crand.Reader, key); err != nil {
		return anirip.Error{Message: "There was an error generating 256 bit key", Err: err}
	}

	api := new(ApiData)
	if val, ok := flashVars["ss_id"]; ok {
		api.SS_ID = val
	}
	if val, ok := flashVars["mv_id"]; ok {
		api.MV_ID = val
	}
	if val, ok := flashVars["device_cd"]; ok {
		api.Device_CD = val
	}
	if val, ok := flashVars["ss1_prm"]; ok {
		api.SS1_PRM = val
	}
	if val, ok := flashVars["ss2_prm"]; ok {
		api.SS2_PRM = val
	}
	if val, ok := flashVars["ss3_prm"]; ok {
		api.SS3_PRM = val
	}
	plainApiJSON, err := json.Marshal(api)
	if err != nil {
		return anirip.Error{Message: "There was an error marshalling api json", Err: err}
	}

	// Zero-pads plainApiJSON to a multiple of the AES block size
	if len(plainApiJSON)%aes.BlockSize != 0 {
		padding := aes.BlockSize - (len(plainApiJSON) % aes.BlockSize)
		paddedJSON := make([]byte, len(plainApiJSON)+padding)
		for p, b := range plainApiJSON {
			paddedJSON[p] = b
		}
		plainApiJSON = paddedJSON
	}

	// Creates a new aes cipher using the key we generated
	cipherBlock, err := aes.NewCipher(key)
	if err != nil {
		return anirip.Error{Message: "There was an error generating aes cipherblock", Err: err}
	}

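	// Encrypts the padded JSON with AES-256-CBC using an all-zero IV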
	cipherApiJSON := make([]byte, len(plainApiJSON))
	iv := make([]byte, aes.BlockSize)
	mode := cipher.NewCBCEncrypter(cipherBlock, iv)
	mode.CryptBlocks(cipherApiJSON, plainApiJSON)

	// Daisuki's public key, used to encrypt the generated AES key before sending
	var pemPublicKey = "-----BEGIN PUBLIC KEY-----\n" +
		"MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDFUkwl6OFLNms3VJQL7rb5bLfi\n" +
		"/u8Lkyx2WaDFw78XPWAkZMLfc9aTtROuBv8b6PNnUpqzC/lpxWQFIhgfKgxR6lRq\n" +
		"4SDT2NkIWV5O/3ZbOJzeCAoe9/G7+wdBHMVo23O39SHO3ycMv74N28KbGsnQ8tj0\n" +
		"NZCYyv/ubQeRUCAHfQIDAQAB\n" +
		"-----END PUBLIC KEY-----"

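	// Decodes the PEM block, parses the RSA public key, and wraps the AES key with it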
	pemBlock, _ := pem.Decode([]byte(pemPublicKey))
	pub, err := x509.ParsePKIXPublicKey(pemBlock.Bytes)
	if err != nil {
		return anirip.Error{Message: "There was an error x509 parsing our pem public key", Err: err}
	}
	pubKey := pub.(*rsa.PublicKey)
	encodedKey, err := rsa.EncryptPKCS1v15(crand.Reader, pubKey, key)
	if err != nil {
		return anirip.Error{Message: "There was an error encrypting our generated key", Err: err}
	}

	// Constructs url params used in encrypted init request
	queryParams := url.Values{
		"s": {flashVars["s"]},
		"c": {countryCode},
		"e": {strings.Replace(episode.URL, ".", "%2", -1)},
		"d": {base64.StdEncoding.EncodeToString(cipherApiJSON)},
		"a": {base64.StdEncoding.EncodeToString(encodedKey)},
	}

	// Executes init request
	bgnInitReqHeaders := http.Header{}
	bgnInitReqHeaders.Add("Content-Type", "application/x-www-form-urlencoded")
	bgnInitReqHeaders.Add("X-Requested-With", "ShockwaveFlash/20.0.0.306")
	bgnInitResponse, err := anirip.GetHTTPResponse("GET",
		"http://www.daisuki.net"+flashVars["init"]+"?"+queryParams.Encode(),
		nil,
		bgnInitReqHeaders,
		cookies)
	if err != nil {
		return err
	}

	// Reads the body of our init requests response
	body, err = ioutil.ReadAll(bgnInitResponse.Body)
	if err != nil {
		return anirip.Error{Message: "There was an error reading init response body", Err: err}
	}

	// Parses our json init response body
	initBody := new(InitResponse)
	if err = json.Unmarshal(body, initBody); err != nil {
		return anirip.Error{Message: "There was an error unmarshalling init response body", Err: err}
	}

	// Base64-decodes the encrypted data received from the InitResponse
	inData, err := base64.StdEncoding.DecodeString(initBody.Rtn)
	if err != nil {
		return anirip.Error{Message: "Unable to base64 decode init return", Err: err}
	}

	// Zero-pads inData to a multiple of the AES block size
	if len(inData)%aes.BlockSize != 0 {
		padding := aes.BlockSize - (len(inData) % aes.BlockSize)
		paddedJSON := make([]byte, len(inData)+padding)
		for p, b := range inData {
			paddedJSON[p] = b
		}
		inData = paddedJSON
	}
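	// Decrypts the padded response with the same AES key and all-zero IV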
	outData := make([]byte, len(inData))
	mode = cipher.NewCBCDecrypter(cipherBlock, iv)
	mode.CryptBlocks(outData, inData)

	// Finds the last non-zero byte of outData
	end := len(outData)
	for end > 0 && outData[end-1] == 0 {
		end--
	}

	// Trims any trailing zero padding before unmarshalling
	if end != len(outData) {
		outData = outData[:end]
	}

	// Finally unmarshals the metadata JSON after decryption
	metaData := new(MetaData)
	if err = json.Unmarshal(outData, metaData); err != nil {
		return anirip.Error{Message: "There was an error unmarshalling daisuki metadata", Err: err}
	}

	// Stores all the info we need for downloading the episode
	episode.Title = strings.SplitN(metaData.TitleStr, " ", 2)[1]
	episode.FileName = anirip.CleanFileName(episode.FileName + episode.Title) // Updates filename with title that we just scraped
	episode.SubtitleInfo = TTMLInfo{
		TTMLUrl: metaData.CaptionURL,
	}
	episode.MediaInfo = HDSInfo{
		ManifestURL: metaData.PlayURL,
	}
	return nil
}
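
The core of the init request above is a small hybrid-encryption step: the API JSON is zero-padded to the AES block size, encrypted with AES-256-CBC under an all-zero IV, and the random session key is wrapped with Daisuki's RSA public key via PKCS#1 v1.5 before both ciphertexts are base64-encoded into the query string. Below is a self-contained sketch of that round trip, with a locally generated RSA key pair standing in for the published public key; names like encryptPayload are illustrative, not part of the original package.

package main

import (
	"crypto/aes"
	"crypto/cipher"
	"crypto/rand"
	"crypto/rsa"
	"encoding/base64"
	"fmt"
	"io"
)

// encryptPayload mirrors the scheme above: zero-pad the JSON to a block
// multiple, AES-256-CBC encrypt it with an all-zero IV, and RSA-wrap the
// session key with the server's public key (PKCS#1 v1.5).
func encryptPayload(plain []byte, serverKey *rsa.PublicKey) (dataB64, keyB64 string, err error) {
	// Random 256-bit session key
	key := make([]byte, 32)
	if _, err = io.ReadFull(rand.Reader, key); err != nil {
		return "", "", err
	}

	// Zero-pads to a full block multiple, as the handler above does
	if rem := len(plain) % aes.BlockSize; rem != 0 {
		plain = append(plain, make([]byte, aes.BlockSize-rem)...)
	}

	block, err := aes.NewCipher(key)
	if err != nil {
		return "", "", err
	}
	encrypted := make([]byte, len(plain))
	cipher.NewCBCEncrypter(block, make([]byte, aes.BlockSize)).CryptBlocks(encrypted, plain)

	// Wraps the session key so the server can recover it
	wrappedKey, err := rsa.EncryptPKCS1v15(rand.Reader, serverKey, key)
	if err != nil {
		return "", "", err
	}
	return base64.StdEncoding.EncodeToString(encrypted), base64.StdEncoding.EncodeToString(wrappedKey), nil
}

func main() {
	// Throwaway key pair standing in for the PEM-encoded public key above
	priv, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		panic(err)
	}
	d, a, err := encryptPayload([]byte(`{"ss_id":"demo","mv_id":"demo"}`), &priv.PublicKey)
	if err != nil {
		panic(err)
	}
	fmt.Println("d =", d) // would go in the "d" query param
	fmt.Println("a =", a) // would go in the "a" query param
}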