Example #1
func init() {
	// Verifies the existence of an anirip folder in our temp directory
	_, err := os.Stat(tempDir)
	if err != nil {
		os.Mkdir(tempDir, 0777)
	}

	// Checks for the existence of our AdobeHDS script, downloading it if we don't have it
	_, err = os.Stat(tempDir + string(os.PathSeparator) + "AdobeHDS.php")
	if err != nil {
		adobeHDSResp, err := anirip.GetHTTPResponse("GET", "https://raw.githubusercontent.com/K-S-V/Scripts/master/AdobeHDS.php", nil, nil, nil)
		if err != nil {
			color.Red("[anirip] There was an error retrieving AdobeHDS.php script from GitHub...")
			return
		}
		defer adobeHDSResp.Body.Close()
		adobeHDSBody, err := ioutil.ReadAll(adobeHDSResp.Body)
		if err != nil {
			color.Red("[anirip] There was an error reading the AdobeHDS.php body...")
			return
		}
		err = ioutil.WriteFile(tempDir+string(os.PathSeparator)+"AdobeHDS.php", adobeHDSBody, 0777)
		if err != nil {
			color.Red("[anirip] There was an error writing AdobeHDS.php to file...")
			return
		}
	}
}
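
All of these examples call anirip.GetHTTPResponse, whose implementation isn't shown. Judging purely from the call sites, a minimal compatible sketch could look like the following; anything beyond building and executing the request (timeouts, redirect handling, retries) is an assumption:

package anirip

import (
	"io"
	"net/http"
)

// GetHTTPResponse is a sketch inferred from the call sites in these examples:
// it builds a request, attaches the passed headers and cookies, and executes
// it with the default client. The real anirip helper may do more.
func GetHTTPResponse(method, url string, body io.Reader, headers http.Header, cookies []*http.Cookie) (*http.Response, error) {
	req, err := http.NewRequest(method, url, body)
	if err != nil {
		return nil, err
	}
	if headers != nil {
		req.Header = headers
	}
	for _, cookie := range cookies {
		req.AddCookie(cookie)
	}
	return http.DefaultClient.Do(req)
}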
Example #2
// Validates the cookies to be sure that we are still logged in
func validateCookies(session *CrunchyrollSession) (bool, error) {
	// We use the cookies we received to attempt a simple authenticated request
	validationReqHeaders := http.Header{}
	validationReqHeaders.Add("Connection", "keep-alive")
	validationResponse, err := anirip.GetHTTPResponse("GET",
		"http://www.crunchyroll.com/",
		nil,
		validationReqHeaders,
		session.Cookies)
	if err != nil {
		return false, err
	}

	// Creates a goquery document for scraping
	validationRespDoc, err := goquery.NewDocumentFromResponse(validationResponse)
	if err != nil {
		return false, anirip.Error{Message: "There was an error parsing cookie validation page", Err: err}
	}

	// Scrapes the document and attempts to find the username
	userName := strings.TrimSpace(validationRespDoc.Find("li.username").First().Text())

	// Checks if the username used to log in is on the home page...
	if validationResponse.StatusCode == 200 && userName != "" {
		return true, nil
	}
	return false, nil
}
Example #3
// Validates the cookies to be sure that we are still logged in
func validateCookies(session *DaisukiSession) (bool, error) {
	// We use the cookies we received to attempt a simple authenticated request
	validationReqHeaders := http.Header{}
	validationReqHeaders.Add("referer", "http://www.daisuki.net/us/en/mypage/info.html")
	validationResponse, err := anirip.GetHTTPResponse("GET",
		"https://www.daisuki.net/us/en/mypage/info.html",
		nil,
		validationReqHeaders,
		session.Cookies)
	if err != nil {
		return false, err
	}

	// Creates a goquery document for scraping
	validationRespDoc, err := goquery.NewDocumentFromResponse(validationResponse)
	if err != nil {
		return false, anirip.Error{Message: "There was an error while accessing the validation page", Err: err}
	}

	// Scrapes the document and attempts to find the username
	userName := validationRespDoc.Find("div#Nickname.clearFix.accountInformation div.list02").First().Text()

	// Checks if the username used to log in is on the account info page...
	if validationResponse.StatusCode == 200 && userName != "" {
		return true, nil
	}
	return false, nil
}
Example #4
// Creates new cookies by re-authenticating with Crunchyroll
func createNewCookies(session *CrunchyrollSession) error {
	// Construct formdata for the login request
	formData := url.Values{
		"formname": {"RpcApiUser_Login"},
		"fail_url": {"http://www.crunchyroll.com/login"},
		"name":     {session.User},
		"password": {session.Pass},
	}

	// Performs the HTTP Request that will log the user in
	loginReqHeaders := http.Header{}
	loginReqHeaders.Add("referer", "https://www.crunchyroll.com/login")
	loginReqHeaders.Add("content-type", "application/x-www-form-urlencoded")
	loginResponse, err := anirip.GetHTTPResponse("POST",
		"https://www.crunchyroll.com/?a=formhandler",
		bytes.NewBufferString(formData.Encode()),
		loginReqHeaders,
		[]*http.Cookie{})
	if err != nil {
		return err
	}

	// Sets the session cookies to the received cookies and returns
	session.Cookies = loginResponse.Cookies()
	return nil
}
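
Examples 2 and 4 fit together as a validate-then-reauthenticate flow. The wrapper below is a hypothetical sketch of how they might combine; getValidCookies is illustrative, not a confirmed anirip function:

// getValidCookies is an illustrative wrapper: it keeps the session's existing
// cookies when they still validate, and otherwise logs in again and re-checks.
func getValidCookies(session *CrunchyrollSession) error {
	valid, err := validateCookies(session)
	if err != nil {
		return err
	}
	if valid {
		return nil // existing cookies are still good
	}
	if err := createNewCookies(session); err != nil {
		return err
	}
	if valid, err = validateCookies(session); err != nil {
		return err
	} else if !valid {
		return anirip.Error{Message: "Unable to login with the credentials provided"}
	}
	return nil
}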
Example #5
// Gets the subtitles xml from Daisuki, parses it, and populates the passed TT param
func (episode *DaisukiEpisode) getSubtitles(subtitles *TT, cookies []*http.Cookie) error {
	// Gets the current time and sets up a referrer for our subtitle request
	nowMillis := strconv.FormatInt(time.Now().UnixNano()/1000000, 10)

	// Performs the HTTP Request that will get the XML of the subtitles
	subReqHeaders := http.Header{}
	subReqHeaders.Add("referrer", episode.URL)
	subtitleResp, err := anirip.GetHTTPResponse("GET",
		episode.SubtitleInfo.TTMLUrl+"?cashPath="+nowMillis,
		nil,
		subReqHeaders,
		cookies)
	if err != nil {
		return err
	}
	defer subtitleResp.Body.Close()

	// Reads the bytes from the received subtitle response body
	subtitleXML, err := ioutil.ReadAll(subtitleResp.Body)
	if err != nil {
		return anirip.Error{Message: "There was an error reading the search response", Err: err}
	}

	// Daisuki's TTML doesn't follow the XML 1.0 guidelines: text nodes contain unescaped characters.
	// We create an element array to help fix invalid XML 1.0 characters
	elementArray := strings.SplitAfterN(string(subtitleXML), ">", -1)

	// We want to edit the elements by doing string replacement on the invalid chars
	for i := 0; i < len(elementArray); i++ {
		// Splits each element into one or two pieces and inspects their contents
		elementSplit := strings.Split(elementArray[i], "<")
		// While only looking at the text portion (before the "<") do string replacement
		if strings.Contains(elementSplit[0], "&") || strings.Contains(elementSplit[0], "'") || strings.Contains(elementSplit[0], "\"") {
			splitEnd := strings.Replace(elementArray[i], elementSplit[0], "", -1) // often a completely empty string
			// HERE'S WHERE WE EXPLICITLY REPLACE ILLEGAL CHARACTERS
			// Each replacement must chain off the previous result, handling "&"
			// first so the "&" in "&quot;" and "&apos;" isn't escaped twice
			escapedText := strings.Replace(elementSplit[0], "&", "&amp;", -1)
			escapedText = strings.Replace(escapedText, "\"", "&quot;", -1)
			escapedText = strings.Replace(escapedText, "'", "&apos;", -1)
			// ADD ANY OTHERS HERE
			elementArray[i] = escapedText + splitEnd
		}
	}

	// Finally joins the XML 1.0 compliant element array back into a single string
	subtitleXMLString := strings.Join(elementArray, "")

	// Parses the xml into our subtitles object
	if err = xml.Unmarshal([]byte(subtitleXMLString), subtitles); err != nil {
		return anirip.Error{Message: "There was an error while reading subtitle information", Err: err}
	}
	return nil
}
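
As a self-contained illustration of the escaping loop above, the sketch below applies the same chained replacement to an invented TTML-style fragment:

package main

import (
	"fmt"
	"strings"
)

// escapeTextNodes mirrors the loop above: split after each ">", isolate the
// text before any "<", and escape the XML-illegal characters, handling "&"
// first so the "&" in "&quot;"/"&apos;" isn't escaped twice.
func escapeTextNodes(xmlIn string) string {
	elements := strings.SplitAfter(xmlIn, ">")
	for i := range elements {
		text := strings.Split(elements[i], "<")[0]
		if strings.ContainsAny(text, "&'\"") {
			rest := strings.TrimPrefix(elements[i], text)
			text = strings.Replace(text, "&", "&amp;", -1)
			text = strings.Replace(text, "\"", "&quot;", -1)
			text = strings.Replace(text, "'", "&apos;", -1)
			elements[i] = text + rest
		}
	}
	return strings.Join(elements, "")
}

func main() {
	in := `<p begin="0s">Rock & roll, isn't it?</p>`
	fmt.Println(escapeTextNodes(in))
	// Prints: <p begin="0s">Rock &amp; roll, isn&apos;t it?</p>
}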
Example #6
// Assigns the subtitle to the passed episode and attempts to get the xml subs for this episode
func (episode *CrunchyrollEpisode) getSubtitleData(subtitles *Subtitle, cookies []*http.Cookie) error {
	// Formdata to indicate the source page
	formData := url.Values{
		"current_page": {episode.URL},
	}

	// Querystring to ask for the subtitles data
	queryString := url.Values{
		"req":                {"RpcApiSubtitle_GetXml"},
		"subtitle_script_id": {strconv.Itoa(episode.SubtitleID)},
	}

	// Performs the HTTP Request that will get the XML
	subtitleDataReqHeaders := http.Header{}
	subtitleDataReqHeaders.Add("Host", "www.crunchyroll.com")
	subtitleDataReqHeaders.Add("Origin", "http://static.ak.crunchyroll.com")
	subtitleDataReqHeaders.Add("Content-type", "application/x-www-form-urlencoded")
	subtitleDataReqHeaders.Add("Referer", "http://static.ak.crunchyroll.com/versioned_assets/StandardVideoPlayer.fb2c7182.swf")
	subtitleDataReqHeaders.Add("X-Requested-With", "ShockwaveFlash/19.0.0.245")
	subtitleDataResponse, err := anirip.GetHTTPResponse("POST",
		"http://www.crunchyroll.com/xml/?"+queryString.Encode(),
		bytes.NewBufferString(formData.Encode()),
		subtitleDataReqHeaders,
		cookies)
	if err != nil {
		return err
	}
	defer subtitleDataResponse.Body.Close()

	// Reads the bytes from the received subtitle data xml response body
	subtitleDataBody, err := ioutil.ReadAll(subtitleDataResponse.Body)
	if err != nil {
		return anirip.Error{Message: "There was an error reading the xml response", Err: err}
	}

	// Parses the xml into our results object
	err = xml.Unmarshal(subtitleDataBody, &subtitles)
	if err != nil {
		return anirip.Error{Message: "There was an error reading xml", Err: err}
	}
	return nil
}
Example #7
// Creates new cookies by re-authenticating with Daisuki
func createNewCookies(session *DaisukiSession) error {
	// Construct formdata for the login request
	formData := url.Values{
		"emailAddress": {session.User},
		"password":     {session.Pass},
	}

	// Performs the HTTP Request that will log the user in
	loginReqHeaders := http.Header{}
	loginReqHeaders.Add("referer", "http://www.daisuki.net/us/en/top.html")
	loginReqHeaders.Add("content-type", "application/x-www-form-urlencoded")
	loginResponse, err := anirip.GetHTTPResponse("POST",
		"https://www.daisuki.net/bin/SignInServlet.html/input",
		bytes.NewBufferString(formData.Encode()),
		loginReqHeaders,
		[]*http.Cookie{})
	if err != nil {
		return err
	}

	// Sets the session cookies to the received cookies and returns
	session.Cookies = loginResponse.Cookies()
	return nil
}
Example #8
// Parses the xml and extracts what we need from it
func (episode *CrunchyrollEpisode) GetEpisodeInfo(quality string, cookies []*http.Cookie) error {
	episode.Quality = quality // Sets the quality to the passed quality string

	// Gets the HTML of the episode page
	episodeReqHeaders := http.Header{}
	episodeReqHeaders.Add("referer", "http://www.crunchyroll.com/"+strings.Split(episode.Path, "/")[1])
	episodeResponse, err := anirip.GetHTTPResponse("GET",
		episode.URL,
		nil,
		episodeReqHeaders,
		cookies)
	if err != nil {
		return err
	}

	// Creates the goquery document that will be used to scrape for episode metadata
	episodeDoc, err := goquery.NewDocumentFromResponse(episodeResponse)
	if err != nil {
		return anirip.Error{Message: "There was an error while reading the episode doc", Err: err}
	}

	// Scrapes the episode metadata from the episode page
	episodeMetaDataJSON := episodeDoc.Find("script#liftigniter-metadata").First().Text()

	// Parses the metadata json to a MetaData object
	episodeMetaData := new(EpisodeMetaData)
	if err := json.Unmarshal([]byte(episodeMetaDataJSON), episodeMetaData); err != nil {
		return anirip.Error{Message: "There was an error while parsing episode metadata", Err: err}
	}

	// Formdata to indicate the source page
	formData := url.Values{
		"current_page": {episode.URL},
	}

	// Querystring for getting the crunchyroll standard config
	queryString := url.Values{
		"req":           {"RpcApiVideoPlayer_GetStandardConfig"},
		"media_id":      {strconv.Itoa(episode.ID)},
		"video_format":  {getVideoFormat(episode.Quality)},
		"video_quality": {getVideoQuality(episode.Quality)},
		"auto_play":     {"1"},
		"aff":           {"crunchyroll-website"},
		"show_pop_out_controls":   {"1"},
		"pop_out_disable_message": {""},
		"click_through":           {"0"},
	}

	// Performs the HTTP Request that will get the XML
	standardConfigReqHeaders := http.Header{}
	standardConfigReqHeaders.Add("Host", "www.crunchyroll.com")
	standardConfigReqHeaders.Add("Origin", "http://static.ak.crunchyroll.com")
	standardConfigReqHeaders.Add("Content-type", "application/x-www-form-urlencoded")
	standardConfigReqHeaders.Add("Referer", "http://static.ak.crunchyroll.com/versioned_assets/StandardVideoPlayer.f3770232.swf")
	standardConfigReqHeaders.Add("X-Requested-With", "ShockwaveFlash/22.0.0.192")
	standardConfigResponse, err := anirip.GetHTTPResponse("POST",
		"http://www.crunchyroll.com/xml/?"+queryString.Encode(),
		bytes.NewBufferString(formData.Encode()),
		standardConfigReqHeaders,
		cookies)
	if err != nil {
		return err
	}
	defer standardConfigResponse.Body.Close()

	// Gets the xml string from the received xml response body
	standardConfigResponseBody, err := ioutil.ReadAll(standardConfigResponse.Body)
	if err != nil {
		return anirip.Error{Message: "There was an error reading the xml response", Err: err}
	}
	xmlString := string(standardConfigResponseBody)

	// Checks for an unsupported region first
	if strings.Contains(xmlString, "<code>") && strings.Contains(xmlString, "</code>") {
		if strings.SplitN(strings.SplitN(xmlString, "<code>", 2)[1], "</code>", 2)[0] == "4" {
			return anirip.Error{Message: "This video is not available in your region", Err: err}
		}
	}

	// Next performs some really basic parsing of the host url
	xmlHostURL := ""
	if strings.Contains(xmlString, "<host>") && strings.Contains(xmlString, "</host>") {
		xmlHostURL = strings.SplitN(strings.SplitN(xmlString, "<host>", 2)[1], "</host>", 2)[0]
	} else {
		return anirip.Error{Message: "No hosts were found for the episode", Err: err}
	}

	// Same type of xml parsing to get the file
	episodeFile := ""
	if strings.Contains(xmlString, "<file>") && strings.Contains(xmlString, "</file>") {
		episodeFile = strings.SplitN(strings.SplitN(xmlString, "<file>", 2)[1], "</file>", 2)[0]
	} else {
		return anirip.Error{Message: "No hosts were found for the episode", Err: err}
	}

	// Parses the URL in order to break out the two urls required for dumping
	hostURL, err := url.Parse(xmlHostURL)
	if err != nil {
		return anirip.Error{Message: "There was an error parsing episode information", Err: err}
	}

	// Sets the RTMP info received before returning
	episode.Title = episodeMetaData.Name
	episode.FileName = anirip.CleanFileName(episode.FileName + episode.Title) // Updates filename with title that we just scraped
	episode.MediaInfo = RTMPInfo{
		File:   episodeFile,
		URLOne: hostURL.Scheme + "://" + hostURL.Host + hostURL.Path,
		URLTwo: strings.Trim(hostURL.RequestURI(), "/"),
	}
	return nil
}
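
The <host> and <file> scraping above repeats the same split-twice pattern; as a sketch, it could be factored into a small helper. extractBetween below is illustrative only, not part of anirip:

// extractBetween returns the first substring of s between openTag and
// closeTag, plus whether both tags were present. It mirrors the nested
// strings.SplitN calls used above.
func extractBetween(s, openTag, closeTag string) (string, bool) {
	if !strings.Contains(s, openTag) || !strings.Contains(s, closeTag) {
		return "", false
	}
	after := strings.SplitN(s, openTag, 2)[1]
	return strings.SplitN(after, closeTag, 2)[0], true
}

// Usage sketch:
//	xmlHostURL, ok := extractBetween(xmlString, "<host>", "</host>")
//	if !ok {
//		return anirip.Error{Message: "No hosts were found for the episode"}
//	}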
Example #9
// Parses the xml and extracts what we need from it
func (episode *DaisukiEpisode) GetEpisodeInfo(quality string, cookies []*http.Cookie) error {
	episode.Quality = quality // Sets the quality to the passed quality string

	// Gets the HTML of the episode page
	episodeReqHeaders := http.Header{}
	episodeReqHeaders.Add("referer", "http://www.daisuki.net/us/en/anime/detail."+strings.Split(episode.Path, ".")[1]+".html")
	episodeResponse, err := anirip.GetHTTPResponse("GET",
		episode.URL,
		nil,
		episodeReqHeaders,
		cookies)
	if err != nil {
		return err
	}

	// Creates the goquery document that will be used to scrape for episode info
	episodeDoc, err := goquery.NewDocumentFromResponse(episodeResponse)
	if err != nil {
		return anirip.Error{Message: "There was an error while reading the episode doc", Err: err}
	}

	flashVars := map[string]string{}
	episodeDoc.Find("div#movieFlash").Each(func(i int, flash *goquery.Selection) {
		// Finds the movie/large episode and adds it to our map
		flashVarString := flash.Find("script").First().Text()
		flashVarString = strings.SplitN(flashVarString, "}", 2)[0]
		flashVarString = strings.SplitN(flashVarString, "{", 2)[1]
		flashVarString = strings.Replace(flashVarString, "'", "\"", -1)
		flashVarString = strings.Replace(flashVarString, " ", "", -1)
		flashVarString = strings.Replace(flashVarString, "\n", "", -1)
		flashVarsArray := strings.Split(flashVarString, "\"")
		newFlashVarsArray := []string{}
		for f := 1; f < len(flashVarsArray)-1; f++ {
			if !strings.Contains(flashVarsArray[f], ":") && !strings.Contains(flashVarsArray[f], ",") {
				newFlashVarsArray = append(newFlashVarsArray, flashVarsArray[f])
			}
		}
		// Fills our map with all the key/value pairs needed from flashvars
		for e := 0; e+1 < len(newFlashVarsArray); e += 2 {
			flashVars[newFlashVarsArray[e]] = newFlashVarsArray[e+1]
		}
	})

	// Check for required fields in flashVars map
	if _, ok := flashVars["s"]; !ok {
		return anirip.Error{Message: "'s' was missing from flashvars", Err: err}
	}
	if _, ok := flashVars["country"]; !ok {
		return anirip.Error{Message: "'country' was missing from flashvars", Err: err}
	}
	if _, ok := flashVars["init"]; !ok {
		return anirip.Error{Message: "'init' was missing from flashvars", Err: err}
	}

	// Gets the current time which we will use in our flashvars request
	nowMillis := strconv.FormatInt(time.Now().UnixNano()/1000000, 10)

	// Performs the HTTP Request that will get the country code
	countryReqHeaders := http.Header{}
	countryReqHeaders.Add("referer", "https://www.crunchyroll.com/login")
	countryReqHeaders.Add("content-type", "application/x-www-form-urlencoded")
	countryResponse, err := anirip.GetHTTPResponse("GET",
		"http://www.daisuki.net"+flashVars["country"]+"?cashPath="+nowMillis,
		nil,
		countryReqHeaders,
		cookies)
	if err != nil {
		return err
	}
	defer countryResponse.Body.Close()

	// Reads the body and extracts the country code which will be used in later requests
	body, err := ioutil.ReadAll(countryResponse.Body)
	if err != nil {
		return anirip.Error{Message: "There was an error getting the country code", Err: err}
	}
	countryCode := strings.Split(strings.Split(string(body), "<country_code>")[1], "</country_code>")[0]

	// Generates a new random 256-bit key
	key := make([]byte, 32)
	if _, err = io.ReadFull(crand.Reader, key); err != nil {
		return anirip.Error{Message: "There was an error generating 256 bit key", Err: err}
	}

	api := new(ApiData)
	if val, ok := flashVars["ss_id"]; ok {
		api.SS_ID = val
	}
	if val, ok := flashVars["mv_id"]; ok {
		api.MV_ID = val
	}
	if val, ok := flashVars["device_cd"]; ok {
		api.Device_CD = val
	}
	if val, ok := flashVars["ss1_prm"]; ok {
		api.SS1_PRM = val
	}
	if val, ok := flashVars["ss2_prm"]; ok {
		api.SS2_PRM = val
	}
	if val, ok := flashVars["ss3_prm"]; ok {
		api.SS3_PRM = val
	}
	plainApiJSON, err := json.Marshal(api)
	if err != nil {
		return anirip.Error{Message: "There was an error marshalling api json", Err: err}
	}

	// Pads plainApiJSON with zero bytes up to a full BlockSize multiple if needed
	if len(plainApiJSON)%aes.BlockSize != 0 {
		padding := aes.BlockSize - (len(plainApiJSON) % aes.BlockSize)
		paddedJSON := make([]byte, len(plainApiJSON)+padding)
		copy(paddedJSON, plainApiJSON)
		plainApiJSON = paddedJSON
	}

	// Creates a new aes cipher using the key we generated
	cipherBlock, err := aes.NewCipher(key)
	if err != nil {
		return anirip.Error{Message: "There was an error generating aes cipherblock", Err: err}
	}

	cipherApiJSON := make([]byte, len(plainApiJSON))
	iv := make([]byte, aes.BlockSize)
	mode := cipher.NewCBCEncrypter(cipherBlock, iv)
	mode.CryptBlocks(cipherApiJSON, plainApiJSON)

	// Public key used to encrypt the AES key we generated before sending it to Daisuki
	var pemPublicKey = "-----BEGIN PUBLIC KEY-----\n" +
		"MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDFUkwl6OFLNms3VJQL7rb5bLfi\n" +
		"/u8Lkyx2WaDFw78XPWAkZMLfc9aTtROuBv8b6PNnUpqzC/lpxWQFIhgfKgxR6lRq\n" +
		"4SDT2NkIWV5O/3ZbOJzeCAoe9/G7+wdBHMVo23O39SHO3ycMv74N28KbGsnQ8tj0\n" +
		"NZCYyv/ubQeRUCAHfQIDAQAB\n" +
		"-----END PUBLIC KEY-----"

	pemBlock, _ := pem.Decode([]byte(pemPublicKey))
	pub, err := x509.ParsePKIXPublicKey(pemBlock.Bytes)
	if err != nil {
		return anirip.Error{Message: "There was an error x509 parsing our pem public key", Err: err}
	}
	pubKey := pub.(*rsa.PublicKey)
	encodedKey, err := rsa.EncryptPKCS1v15(crand.Reader, pubKey, key)
	if err != nil {
		return anirip.Error{Message: "There was an error encrypting our generated key", Err: err}
	}

	// Constructs url params used in encrypted init request
	queryParams := url.Values{
		"s": {flashVars["s"]},
		"c": {countryCode},
		"e": {strings.Replace(episode.URL, ".", "%2", -1)},
		"d": {base64.StdEncoding.EncodeToString(cipherApiJSON)},
		"a": {base64.StdEncoding.EncodeToString(encodedKey)},
	}

	// Executes init request
	bgnInitReqHeaders := http.Header{}
	bgnInitReqHeaders.Add("Content-Type", "application/x-www-form-urlencoded")
	bgnInitReqHeaders.Add("X-Requested-With", "ShockwaveFlash/20.0.0.306")
	bgnInitResponse, err := anirip.GetHTTPResponse("GET",
		"http://www.daisuki.net"+flashVars["init"]+"?"+queryParams.Encode(),
		nil,
		bgnInitReqHeaders,
		cookies)
	if err != nil {
		return err
	}
	defer bgnInitResponse.Body.Close()

	// Reads the body of our init request's response
	body, err = ioutil.ReadAll(bgnInitResponse.Body)
	if err != nil {
		return anirip.Error{Message: "There was an error reading init response body", Err: err}
	}

	// Parses our json init response body
	initBody := new(InitResponse)
	if err = json.Unmarshal(body, initBody); err != nil {
		return anirip.Error{Message: "There was an error unmarshalling init response body", Err: err}
	}

	// Attempts to decrypt the encrypted data received from InitResponse
	inData, err := base64.StdEncoding.DecodeString(initBody.Rtn)
	if err != nil {
		return anirip.Error{Message: "Unable to base64 decode init return", Err: err}
	}

	// Pads inData with zero bytes up to a full BlockSize multiple if needed
	if len(inData)%aes.BlockSize != 0 {
		padding := aes.BlockSize - (len(inData) % aes.BlockSize)
		paddedData := make([]byte, len(inData)+padding)
		copy(paddedData, inData)
		inData = paddedData
	}
	outData := make([]byte, len(inData))
	mode = cipher.NewCBCDecrypter(cipherBlock, iv)
	mode.CryptBlocks(outData, inData)

	// Finds the last non-zero byte of outData (guarding against an all-zero buffer)
	end := len(outData)
	for end > 0 && outData[end-1] == 0 {
		end--
	}

	// If the end of the array isn't the length of outData resize
	if end != len(outData) {
		outData = outData[:end]
	}

	// Finally unmarshals the metadata json after decryption
	metaData := new(MetaData)
	if err = json.Unmarshal(outData, metaData); err != nil {
		return anirip.Error{Message: "There was an error unmarshalling daisuki metadata", Err: err}
	}

	// Stores all the info we need for the episode
	episode.Title = strings.SplitN(metaData.TitleStr, " ", 2)[1]
	episode.FileName = anirip.CleanFileName(episode.FileName + episode.Title) // Updates filename with title that we just scraped
	episode.SubtitleInfo = TTMLInfo{
		TTMLUrl: metaData.CaptionURL,
	}
	episode.MediaInfo = HDSInfo{
		ManifestURL: metaData.PlayURL,
	}
	return nil
}
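
The crypto in Example #9 is symmetric: the request payload is zero-padded and encrypted with AES-256-CBC under an all-zero IV, and the response is decrypted the same way (the AES key itself travels RSA-PKCS1v15-encrypted). The standalone sketch below round-trips an invented payload through just the AES half:

package main

import (
	"bytes"
	"crypto/aes"
	"crypto/cipher"
	crand "crypto/rand"
	"fmt"
	"io"
)

// zeroPad right-pads data with zero bytes to a multiple of the AES block
// size, matching the padding used in the example above.
func zeroPad(data []byte) []byte {
	if rem := len(data) % aes.BlockSize; rem != 0 {
		data = append(data, make([]byte, aes.BlockSize-rem)...)
	}
	return data
}

func main() {
	// Random 256-bit key, as in the example
	key := make([]byte, 32)
	if _, err := io.ReadFull(crand.Reader, key); err != nil {
		panic(err)
	}
	block, err := aes.NewCipher(key)
	if err != nil {
		panic(err)
	}
	iv := make([]byte, aes.BlockSize) // all-zero IV, as in the example

	plain := zeroPad([]byte(`{"ss_id":"demo"}`)) // invented payload
	enc := make([]byte, len(plain))
	cipher.NewCBCEncrypter(block, iv).CryptBlocks(enc, plain)

	dec := make([]byte, len(enc))
	cipher.NewCBCDecrypter(block, iv).CryptBlocks(dec, enc)

	// Strips the trailing zero padding, as the example does after decryption
	fmt.Println(string(bytes.TrimRight(dec, "\x00")))
}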
Example #10
// Given a show pointer, appends all the seasons/episodes found for the show
func (show *CrunchyrollShow) ScrapeEpisodes(showURL string, cookies []*http.Cookie) error {
	// Gets the HTML of the show page
	showResponse, err := anirip.GetHTTPResponse("GET",
		showURL,
		nil,
		nil,
		cookies)
	if err != nil {
		return err
	}

	// Creates a goquery document for scraping
	showDoc, err := goquery.NewDocumentFromResponse(showResponse)
	if err != nil {
		return anirip.Error{Message: "There was an error while accessing the show page", Err: err}
	}

	// Scrapes the show metadata from the show page
	showMetaDataJSON := showDoc.Find("script#liftigniter-metadata").First().Text()

	// Parses the metadata json to a ShowMetaData object
	showMetaData := new(ShowMetaData)
	if err := json.Unmarshal([]byte(showMetaDataJSON), showMetaData); err != nil {
		return anirip.Error{Message: "There was an error while parsing show metadata", Err: err}
	}

	// Sets Title, and Path and URL on our show object
	show.Title = showMetaData.Name
	show.URL = showMetaData.URL
	show.Path = strings.Replace(show.URL, "http://www.crunchyroll.com", "", 1) // Removes the host so we have just the path

	// Finds the list of seasons, then scrapes each season and its episodes
	showDoc.Find("ul.list-of-seasons.cf").Each(func(i int, seasonList *goquery.Selection) {
		seasonList.Find("li.season").Each(func(i2 int, episodeList *goquery.Selection) {
			// Adds a new season to the show containing all information
			seasonTitle, _ := episodeList.Find("a").First().Attr("title")

			// Adds the title minus any "Episode XX" for shows that only have one season
			show.Seasons = append(show.Seasons, CrunchyrollSeason{
				Title: strings.SplitN(seasonTitle, " Episode ", 2)[0],
			})

			// Within that season finds all episodes
			episodeList.Find("div.wrapper.container-shadow.hover-classes").Each(func(i3 int, episode *goquery.Selection) {
				// Appends all new episode information to newly appended season
				episodeTitle := strings.TrimSpace(strings.Replace(episode.Find("span.series-title.block.ellipsis").First().Text(), "\n", "", 1))
				episodeNumber, _ := strconv.ParseFloat(strings.Replace(episodeTitle, "Episode ", "", 1), 64)
				episodePath, _ := episode.Find("a").First().Attr("href")
				episodeID, _ := strconv.Atoi(episodePath[len(episodePath)-6:])
				show.Seasons[i2].Episodes = append(show.Seasons[i2].Episodes, CrunchyrollEpisode{
					ID:     episodeID,
					Title:  episodeTitle,
					Number: episodeNumber,
					Path:   episodePath,
					URL:    "http://www.crunchyroll.com" + episodePath,
				})
			})
		})
	})

	// Re-arranges seasons and episodes in the show object so they run from first to last
	tempSeasonArray := []CrunchyrollSeason{}
	for i := len(show.Seasons) - 1; i >= 0; i-- {
		// First sort episodes from first to last
		tempEpisodesArray := []CrunchyrollEpisode{}
		for n := len(show.Seasons[i].Episodes) - 1; n >= 0; n-- {
			tempEpisodesArray = append(tempEpisodesArray, show.Seasons[i].Episodes[n])
		}
		// Let's not bother appending anything if there are no episodes in the season
		if len(tempEpisodesArray) > 0 {
			tempSeasonArray = append(tempSeasonArray, CrunchyrollSeason{
				Title:    show.Seasons[i].Title,
				Length:   len(tempEpisodesArray),
				Episodes: tempEpisodesArray,
			})
		}
	}
	show.Seasons = tempSeasonArray

	// Assigns each season a number and episode a filename
	for s, season := range show.Seasons {
		show.Seasons[s].Number = s + 1
		for e, episode := range season.Episodes {
			show.Seasons[s].Episodes[e].FileName = anirip.GenerateEpisodeFileName(show.Title, show.Seasons[s].Number, episode.Number, "")
		}
	}

	// TODO Filter out episodes that aren't yet released (ex One Piece)
	return nil
}
Example #11
func (episode *CrunchyrollEpisode) getSubtitleInfo(subtitles *Subtitle, language string, cookies []*http.Cookie) (string, error) {
	// Formdata to indicate the source page
	formData := url.Values{
		"current_page": {episode.URL},
	}

	// Querystring to ask for the subtitles info
	queryString := url.Values{
		"req":                  {"RpcApiSubtitle_GetListing"},
		"media_id":             {strconv.Itoa(episode.ID)},
		"video_format":         {getVideoFormat(episode.Quality)},
		"video_encode_quality": {getVideoQuality(episode.Quality)},
	}

	// Performs the HTTP Request that will get the XML
	subtitleInfoReqHeaders := http.Header{}
	subtitleInfoReqHeaders.Add("Host", "www.crunchyroll.com")
	subtitleInfoReqHeaders.Add("Origin", "http://static.ak.crunchyroll.com")
	subtitleInfoReqHeaders.Add("Content-type", "application/x-www-form-urlencoded")
	subtitleInfoReqHeaders.Add("Referer", "http://static.ak.crunchyroll.com/versioned_assets/StandardVideoPlayer.fb2c7182.swf")
	subtitleInfoReqHeaders.Add("X-Requested-With", "ShockwaveFlash/19.0.0.245")
	subtitleInfoResponse, err := anirip.GetHTTPResponse("POST",
		"http://www.crunchyroll.com/xml/?"+queryString.Encode(),
		bytes.NewBufferString(formData.Encode()),
		subtitleInfoReqHeaders,
		cookies)
	if err != nil {
		return "", err
	}
	defer subtitleInfoResponse.Body.Close()

	// Reads the bytes from the received subtitle info xml response body
	subtitleInfoBody, err := ioutil.ReadAll(subtitleInfoResponse.Body)
	if err != nil {
		return "", anirip.Error{Message: "There was an error reading the xml response", Err: err}
	}

	// If the XML explicitly states that there is NO MEDIA, return an empty language string
	if strings.Contains(string(subtitleInfoBody), "<media_id>None</media_id>") {
		return "", nil
	}

	// Parses the xml into our results object
	subListResults := SubListResults{}
	if err = xml.Unmarshal(subtitleInfoBody, &subListResults); err != nil {
		return "", anirip.Error{Message: "There was an error while reading subtitle information", Err: err}
	}

	// Finds the subtitle ID of the language we want
	for i := 0; i < len(subListResults.Subtitles); i++ {
		if strings.Contains(subListResults.Subtitles[i].Title, language) {
			subtitles = &subListResults.Subtitles[i]
			episode.SubtitleID = subtitles.ID
			return "eng", nil
		}
	}

	// If we can't find the requested language, default to English
	for i := 0; i < len(subListResults.Subtitles); i++ {
		if strings.Contains(subListResults.Subtitles[i].Title, "English") {
			subtitles = &subListResults.Subtitles[i]
			episode.SubtitleID = subtitles.ID
			return "eng", nil
		}
	}

	// Again, if no subs are found after a successful parse, they are either hardcoded or dubbed
	return "", nil
}
Example #12
// Given a show pointer, appends all the seasons/episodes found for the show
func (show *DaisukiShow) ScrapeEpisodes(showURL string, cookies []*http.Cookie) error {
	// Gets the HTML of the show page
	showResponse, err := anirip.GetHTTPResponse("GET",
		showURL,
		nil,
		nil,
		cookies)
	if err != nil {
		return err
	}

	// Creates a goquery document for scraping episodes
	showDoc, err := goquery.NewDocumentFromResponse(showResponse)
	if err != nil {
		return anirip.Error{Message: "There was an error while accessing the show page", Err: err}
	}

	// Sets Title, AdID and Path in the case where the user passes a show URL
	if show.Title == "" {
		show.Title = showDoc.Find("h1#animeTitle").First().Text() // Scrapes the show title from the season body if it wasn't set by a search
	}
	if show.URL == "" {
		show.URL = showURL
	}
	if show.Path == "" {
		show.Path = strings.Replace(show.URL, "http://www.daisuki.net", "", 1) // Removes the host so we have just the path
	}
	if show.AdID == "" {
		show.AdID = strings.Replace(show.URL, "http://www.daisuki.net/us/en/anime/detail.", "", 1) // Removes the leading path
		show.AdID = strings.Replace(show.AdID, ".html", "", 1)                                     // Replaces the .html so we have just the AdID
	}

	// Searches first for the episodes/movies
	episodeMap := make(map[int]string)
	showDoc.Find("div#moviesBlock").Each(func(i int, season *goquery.Selection) {
		// Finds the movie/latest-or-first episode and adds it to our map
		season.Find("div#content0.content.clearFix.liquid").Each(func(i2 int, movieEpisode *goquery.Selection) {
			// Gets the episode number from that movie
			episodeThumb, exists := movieEpisode.Find("img").First().Attr("delay")
			episodeNumber, err := strconv.Atoi(movieEpisode.Find("p.episodeNumber").Text())
			if err == nil && exists && episodeThumb != "" {
				episodeMap[episodeNumber] = "/us/en/anime/watch." + strings.Split(episodeThumb, "/")[6] + "." + strings.Split(episodeThumb, "/")[7] + ".html"
			}
		})
		// Finds ALL non-movie/latest-or-first episodes
		season.Find("div#contentList0.contentList.clearFix.liquid div.item").Each(func(i2 int, episodeItem *goquery.Selection) {
			// Grabs episode information that isn't empty and has a url associated with an episode number
			episodeThumb, exists := episodeItem.Find("img").First().Attr("delay")
			episodeNumber, err := strconv.Atoi(episodeItem.Find("p.episodeNumber").Text())
			if err == nil && exists && episodeThumb != "" {
				episodeMap[episodeNumber] = "/us/en/anime/watch." + strings.Split(episodeThumb, "/")[6] + "." + strings.Split(episodeThumb, "/")[7] + ".html"
			}
		})
	})

	// Appends a single season to the show, then fills it with episodes ordered first to last
	show.Seasons = append(show.Seasons, DaisukiSeason{ // appends a new season that we'll append episodes to
		Title:  show.Title,
		Number: 1,
		Length: len(episodeMap),
	})
	for i := 0; i < len(episodeMap); i++ {
		show.Seasons[0].Episodes = append(show.Seasons[0].Episodes, DaisukiEpisode{
			Number: float64(i + 1),
			Path:   episodeMap[i+1],
			URL:    "http://www.daisuki.net" + episodeMap[i+1],
		})
	}

	// Assigns each season a number and episode a filename
	for s, season := range show.Seasons {
		for e, episode := range season.Episodes {
			// Generates a partial file name that we'll later improve on when we get the episode html
			show.Seasons[s].Episodes[e].FileName = anirip.GenerateEpisodeFileName(show.Title, show.Seasons[s].Number, episode.Number, "")
		}
	}
	return nil
}