Example #1
0
// PatchFiles applies the translation named translationName to every archive
// it covers under pso2dir, repacking changed archives into outputPath.
// Unless backupPath is empty, each original archive is moved (or copied)
// aside before being replaced. Up to parallel archives are processed
// concurrently; every error encountered is collected into errs rather than
// aborting the whole run.
func PatchFiles(db *trans.Database, pso2dir, translationName, backupPath, outputPath string, parallel int) (errs []error) {
	translation, err := db.QueryTranslation(translationName)
	if err == nil && translation == nil {
		err = errors.New("translation not found")
	}
	if err != nil {
		return []error{err}
	}

	archives, err := db.QueryArchivesTranslation(translation)
	if err != nil {
		return []error{err}
	}

	pbar := pb.New(len(archives))
	pbar.SetRefreshRate(time.Second / 10)
	pbar.Start()

	queue := make(chan *trans.Archive)
	done := make(chan bool)

	errlock := sync.Mutex{}

	// complain records a non-nil err into errs (guarded by errlock, since
	// several workers call it concurrently) and reports whether err was
	// non-nil.
	complain := func(err error) bool {
		if err != nil {
			errlock.Lock()
			errs = append(errs, err)
			errlock.Unlock()
			return true
		}

		return false
	}

	for i := 0; i < parallel; i++ {
		go func() {
			for a := range queue {
				aname := path.Join(pso2dir, a.Name.String())
				af, err := os.OpenFile(aname, os.O_RDONLY, 0)
				if complain(err) {
					continue
				}

				archive, err := ice.NewArchive(util.BufReader(af))
				if complain(err) {
					af.Close() // don't leak the descriptor on a bad archive
					continue
				}

				files, err := db.QueryFiles(a)
				if complain(err) {
					af.Close()
					continue
				}

				fileDirty := false

				// Temp files backing replaced entries; they must stay open
				// until archive.Write (below) has consumed them.
				var textfiles []*os.File
				for _, f := range files {
					tstrings, err := db.QueryTranslationStringsFile(translation, &f)
					if complain(err) || len(tstrings) == 0 {
						continue
					}

					// Resolve each translation string; entries whose lookup
					// failed stay nil and are skipped during matching below.
					strings := make([]*trans.String, len(tstrings))
					for i := range tstrings {
						strings[i], err = db.QueryStringTranslation(&tstrings[i])
						complain(err)
					}

					file := archive.FindFile(-1, f.Name)
					if file == nil {
						complain(errors.New(f.Name + ": file not found"))
						continue
					}
					textfile, err := text.NewTextFile(file.Data)
					if complain(err) {
						// Previously this error was never checked, risking a
						// nil dereference below.
						continue
					}

					// Track identifier collisions so repeated identifiers map
					// to the correct database row.
					collisions := make(map[string]int)

					for _, p := range textfile.Pairs {
						collision := collisions[p.Identifier]
						collisions[p.Identifier] = collision + 1

						var ts *trans.TranslationString
						for i, s := range strings {
							// s is nil when its lookup failed above.
							if s != nil && s.Identifier == p.Identifier && s.Collision == collision {
								ts = &tstrings[i]
								break
							}
						}
						if ts == nil {
							continue
						}

						if p.String != ts.Translation {
							entry := textfile.PairString(&p)
							entry.Text = ts.Translation
							fileDirty = true
						}
					}

					tf, err := ioutil.TempFile("", "")
					if complain(err) {
						continue
					}

					writer := bufio.NewWriter(tf)
					err = textfile.Write(writer)
					if ferr := writer.Flush(); err == nil {
						err = ferr
					}
					if complain(err) {
						tf.Close()
						os.Remove(tf.Name())
						continue
					}
					// Seek(0, 1) = seek 0 from current: yields the number of
					// bytes written; then rewind so the archive can read it.
					pos, _ := tf.Seek(0, 1)
					tf.Seek(0, 0)

					archive.ReplaceFile(file, tf, uint32(pos))
					textfiles = append(textfiles, tf)
				}

				if fileDirty {
					ofile, err := ioutil.TempFile("", "")

					aname := path.Join(outputPath, a.Name.String())

					if !complain(err) {
						// NOTE(review): an archive reported as modified is
						// written without backing up the original —
						// presumably it was already patched once; confirm
						// against ice.Archive.IsModified semantics.
						backupPath := backupPath
						if archive.IsModified() {
							backupPath = ""
						}

						writer := bufio.NewWriter(ofile)
						err = archive.Write(writer)
						if ferr := writer.Flush(); err == nil {
							err = ferr
						}
						ofile.Close()

						// Only move the original aside once the replacement
						// archive was written out successfully; previously a
						// failed Write was overwritten by the rename result,
						// which could install a corrupt archive.
						if err == nil && backupPath != "" {
							opath := path.Join(backupPath, path.Base(aname))
							err = os.Rename(aname, opath)
							if err != nil {
								// Rename fails across filesystems; fall back
								// to a plain copy.
								err = util.CopyFile(aname, opath)
							}
						}

						if complain(err) {
							os.Remove(ofile.Name())
						} else {
							err = os.Rename(ofile.Name(), aname)
							if err != nil {
								err = util.CopyFile(ofile.Name(), aname)
								os.Remove(ofile.Name())
							}
							complain(err)
						}
					}
				}

				// Release the temp files now that the archive (if dirty)
				// has been written.
				for _, tf := range textfiles {
					tf.Close()
					os.Remove(tf.Name())
				}

				af.Close()

				pbar.Increment()
			}

			done <- true
		}()
	}

	for i := range archives {
		queue <- &archives[i]
	}
	close(queue)

	// Wait for every worker to drain the queue and finish.
	for i := 0; i < parallel; i++ {
		<-done
	}

	pbar.Finish()

	return
}
Example #2
0
// main drives the translation database tool. Modes:
//
//	-i N  import text from ICE archives (or AIDA skit/CSV dumps) at version N
//	-t X  patch game files in-place using translation X
//	-s P  write a stripped copy of the database to P
func main() {
	var flagTrans, flagBackup, flagOutput, flagStrip string
	var flagAidaSkits, flagAidaStrings string
	var flagImport, flagParallel int

	flag.Usage = usage
	flag.IntVar(&flagImport, "i", 0, "import files with the specified version")
	flag.IntVar(&flagParallel, "p", runtime.NumCPU()+1, "max parallel tasks")
	flag.StringVar(&flagTrans, "t", "", "translation name (eng, story-eng, etc.)")
	flag.StringVar(&flagBackup, "b", "", "backup files to this path before modifying them")
	flag.StringVar(&flagStrip, "s", "", "write out a stripped database")
	flag.StringVar(&flagOutput, "o", "", "alternate output directory for repacked files")
	flag.StringVar(&flagAidaSkits, "aidaskits", "", "skit list file")
	flag.StringVar(&flagAidaStrings, "aidastrings", "", "translation csv file")
	flag.Parse()

	if flag.NArg() < 1 {
		fmt.Fprintln(os.Stderr, "no database provided")
		flag.Usage()
		flag.PrintDefaults()
		// Previously execution fell through here and failed later on an
		// empty database path.
		os.Exit(2)
	}

	// Allow up to flagParallel OS threads to run simultaneously.
	maxprocs := runtime.GOMAXPROCS(0)
	if maxprocs < flagParallel {
		runtime.GOMAXPROCS(flagParallel)
	}

	dbpath := flag.Arg(0)
	fmt.Fprintf(os.Stderr, "Opening database `%s`...\n", dbpath)
	db, err := trans.NewDatabase(dbpath)
	ragequit(dbpath, err)

	if flagImport != 0 {
		if flagAidaSkits != "" || flagAidaStrings != "" {
			if flagAidaSkits == "" || flagAidaStrings == "" || flagTrans == "" {
				ragequit("", errors.New("-aidaskits, -aidastrings, and -t must all be specified together"))
			}

			fmt.Fprintln(os.Stderr, "Importing from AIDA files...")
			// Maps AIDA's directory names to archive names via the skit list.
			archiveMap := make(map[string]trans.ArchiveName)

			sf, err := os.Open(flagAidaSkits)
			ragequit(flagAidaSkits, err)

			// Each skit line is "archive hdr group name"; malformed lines are
			// skipped. NOTE(review): a persistent non-EOF read error would
			// spin here forever — this assumes Fscanln eventually reports EOF.
			for err != io.EOF {
				var scanArchive, scanHdr, scanGroup, scanName string
				var n int
				n, err = fmt.Fscanln(sf, &scanArchive, &scanHdr, &scanGroup, &scanName)

				if err != nil || n != 4 {
					continue
				}

				aname, err := trans.ArchiveNameFromString(scanArchive)
				if complain(scanArchive, err) {
					continue
				}

				archiveMap[scanName] = *aname
			}

			sf.Close()

			f, err := os.Open(flagAidaStrings)
			ragequit(flagAidaStrings, err)

			r := csv.NewReader(f)
			r.TrimLeadingSpace = true
			r.FieldsPerRecord = 5

			// Find or create the destination translation.
			t, err := db.QueryTranslation(flagTrans)
			ragequit(flagTrans, err) // previously a query error was ignored here
			if t == nil {
				t, err = db.InsertTranslation(flagTrans)
				ragequit(flagTrans, err)
			}

			db.Begin()
			// Per-file identifier collision counters, mirroring the order in
			// which identical identifiers appear in the CSV.
			collisions := make(map[string]map[string]int)
			for {
				var line []string // {filename, type, zeroUnk, identifier, string}
				line, err = r.Read()

				if err != nil {
					break
				}

				// AIDA paths use backslashes; normalize before splitting.
				line[0] = strings.Replace(line[0], "\\", "/", -1)
				archive := path.Dir(line[0])

				aname, ok := archiveMap[archive]
				if !ok {
					ragequit(archive, errors.New("unknown archive name"))
				}

				fname := path.Base(line[0])

				translation := line[4]
				identifier := line[3]

				c := collisions[line[0]]
				if c == nil {
					collisions[line[0]] = make(map[string]int)
					c = collisions[line[0]]
				}

				collision := c[identifier]
				c[identifier] = collision + 1

				a, err := db.QueryArchive(&aname)
				if complain(archive, err) {
					continue
				}

				if a == nil && complain(archive, errors.New("archive not found in database")) {
					continue
				}

				f, err := db.QueryFile(a, fname)
				if complain(fname, err) {
					continue
				}

				if f == nil && complain(archive+": "+fname, errors.New("file not found in database")) {
					continue
				}

				s, err := db.QueryString(f, collision, identifier)
				if complain(fname+": "+identifier, err) {
					continue
				}

				if s == nil {
					complain(fname+": "+identifier, errors.New("string not found in database"))
					continue
				}

				// Only touch rows whose translation actually differs.
				if s.Value != translation {
					ts, _ := db.QueryTranslationString(t, s)
					if ts != nil {
						_, err = db.UpdateTranslationString(ts, translation)
					} else {
						_, err = db.InsertTranslationString(t, s, translation)
					}
					// Previously these write errors were silently dropped
					// (overwritten by the next r.Read).
					complain(fname+": "+identifier, err)
				}
			}

			if err != io.EOF {
				ragequit(flagAidaStrings, err)
			}

			f.Close()

			db.End()

			fmt.Fprintln(os.Stderr, "Import complete!")
		} else {
			// Import directly from ICE archives named on the command line.
			for i := 1; i < flag.NArg(); i++ {
				name := flag.Arg(i)
				aname, err := trans.ArchiveNameFromString(path.Base(name))
				if complain(name, err) {
					continue
				}

				fmt.Fprintf(os.Stderr, "Opening archive `%s`...\n", name)
				af, err := os.OpenFile(name, os.O_RDONLY, 0)
				if complain(name, err) {
					continue
				}

				archive, err := ice.NewArchive(util.BufReader(af))
				if complain(name, err) {
					af.Close()
					continue
				}

				// Lazily-resolved database rows for this archive.
				var a *trans.Archive
				var translation *trans.Translation

				for gi := 0; gi < archive.GroupCount(); gi++ {
					group := archive.Group(gi)

					for _, file := range group.Files {
						if file.Type == "text" {
							fmt.Fprintf(os.Stderr, "Importing file `%s`...\n", file.Name)

							t, err := text.NewTextFile(file.Data)
							if complain(file.Name, err) {
								continue
							}

							if a == nil {
								a, err = db.QueryArchive(aname)
								if complain(name, err) {
									continue
								}

								if a == nil {
									a, err = db.InsertArchive(aname)
									if complain(name, err) {
										continue
									}
								}
							}

							f, err := db.QueryFile(a, file.Name)
							if complain(file.Name, err) {
								continue
							}

							if f == nil {
								f, err = db.InsertFile(a, file.Name)
								if complain(file.Name, err) {
									continue
								}
							}

							// Collision counters disambiguate repeated
							// identifiers within one text file.
							collisions := make(map[string]int)

							db.Begin()
							for _, p := range t.Pairs {
								collision := collisions[p.Identifier]
								collisions[p.Identifier] = collision + 1

								s, err := db.QueryString(f, collision, p.Identifier)
								if complain(f.Name+": "+p.Identifier, err) {
									break
								}

								if s != nil {
									if s.Value != p.String {
										if flagTrans != "" {
											// Find or create the translation
											// the first time it is needed.
											if translation == nil {
												translation, err = db.QueryTranslation(flagTrans)
												if complain(flagTrans, err) {
													break
												}
												if translation == nil {
													translation, err = db.InsertTranslation(flagTrans)
													if complain(flagTrans, err) {
														break
													}
												}
											}

											ts, _ := db.QueryTranslationString(translation, s)
											if ts != nil {
												_, err = db.UpdateTranslationString(ts, p.String)
											} else {
												_, err = db.InsertTranslationString(translation, s, p.String)
											}
										} else {
											_, err = db.UpdateString(s, flagImport, p.String)
										}

										if complain(f.Name+": "+p.Identifier, err) {
											break
										}
									}
								} else {
									if flagTrans != "" {
										complain(f.Name+": "+p.Identifier+": "+p.String, errors.New("translated identifier does not exist"))
									} else {
										_, err := db.InsertString(f, flagImport, collision, p.Identifier, p.String)
										if complain(f.Name+": "+p.Identifier, err) {
											break
										}
									}
								}
							}
							db.End()
						}
					}
				}

				af.Close()
			}
		}
	} else if flagTrans != "" {
		if flag.NArg() < 2 {
			fmt.Fprintln(os.Stderr, "no pso2 dir provided")
			return
		}

		if flagBackup != "" {
			err := os.MkdirAll(flagBackup, 0777)
			ragequit(flagBackup, err)
		}

		pso2dir := flag.Arg(1)
		if flagOutput == "" {
			// Default to patching the game files in place.
			flagOutput = pso2dir
		} else {
			err := os.MkdirAll(flagOutput, 0777)
			ragequit(flagOutput, err)
		}

		errs := cmd.PatchFiles(db, pso2dir, flagTrans, flagBackup, flagOutput, flagParallel)

		for _, err := range errs {
			complain("", err)
		}
	}

	db.Close()

	if flagStrip != "" {
		err := cmd.StripDatabase(dbpath, flagStrip)
		complain(flagStrip, err)
	}
}
Example #3
0
// main inspects and manipulates a single ICE archive: -p prints its
// contents, -x extracts it into a folder, and -w repacks it (optionally
// replacing or deleting files named with -r).
func main() {
	var flagPrint bool
	var flagExtract string
	var flagWrite string
	var flagReplace flagReplaceType

	flag.Usage = usage
	flag.BoolVar(&flagPrint, "p", false, "print details about the archive")
	flag.StringVar(&flagExtract, "x", "", "extract the archive to a folder")
	flag.StringVar(&flagWrite, "w", "", "write a repacked archive")
	flag.Var(&flagReplace, "r", `replace a file while repacking, use with -w (comma-separated, entry format is "filename:path". an empty path deletes the file from the archive)`)
	flag.Parse()

	if flag.NArg() != 1 {
		fmt.Fprintln(os.Stderr, "no archive provided")
		flag.Usage()
		flag.PrintDefaults()
		// Previously execution fell through here and failed on an empty path.
		os.Exit(2)
	}

	apath := flag.Arg(0)
	fmt.Fprintf(os.Stderr, "Opening archive `%s`...\n", apath)
	f, err := os.OpenFile(apath, os.O_RDONLY, 0)
	ragequit(apath, err)

	a, err := ice.NewArchive(util.BufReader(f))
	ragequit(apath, err)

	if flagPrint {
		for i := 0; i < a.GroupCount(); i++ {
			group := a.Group(i)

			fmt.Printf("Archive Group %d (0x%04x files)\n", i, len(group.Files))
			for _, file := range group.Files {
				fmt.Printf("\t%s (%s):\t0x%08x\n", file.Name, file.Type, file.Size)
			}
		}
	}

	if flagExtract != "" {
		for i := 0; i < a.GroupCount(); i++ {
			// Each group extracts into its own numbered subdirectory.
			extPath := path.Join(flagExtract, fmt.Sprintf("%d", i))
			os.MkdirAll(extPath, 0777)

			group := a.Group(i)

			for _, file := range group.Files {
				fmt.Println("Extracting", file.Name, "...")

				out, err := os.Create(path.Join(extPath, file.Name))
				ragequit(file.Name, err)

				_, err = io.Copy(out, file.Data)
				out.Close()
				ragequit(file.Name, err) // previously copy errors were ignored
			}
		}
	}

	if flagWrite != "" {
		ofile, err := os.Create(flagWrite)
		ragequit(flagWrite, err)

		for i := 0; i < a.GroupCount(); i++ {
			group := a.Group(i)

			for j := range group.Files {
				// Point at the archive's own slice element. The old code
				// passed &file (the address of the range-loop copy), so every
				// replacement referenced a transient loop variable instead of
				// the archive's entry.
				file := &group.Files[j]
				if newpath, ok := flagReplace[file.Name]; ok {
					if newpath == "" {
						// An empty replacement path deletes the file.
						a.ReplaceFile(file, nil, 0)
					} else {
						newfile, err := os.Open(newpath)
						ragequit(newpath, err)

						st, err := newfile.Stat()
						ragequit(newpath, err)

						// File sizes are stored as uint32 in the format.
						if st.Size() > int64(^uint32(0)) {
							ragequit(newpath, errors.New("file too large"))
						}

						a.ReplaceFile(file, newfile, uint32(st.Size()))
					}
				}
			}
		}

		fmt.Fprintf(os.Stderr, "Writing to archive `%s`...\n", flagWrite)
		writer := bufio.NewWriter(ofile)
		err = a.Write(writer)
		if ferr := writer.Flush(); err == nil {
			err = ferr
		}
		ofile.Close()
		ragequit(flagWrite, err) // previously write errors were ignored
	}
}