Example #1
func handleRequest(conn net.Conn) {
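	// derive the cipher key and nonce from the configured password and a time-based cookie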
	timeCookie := tool.GetTimeCookie()
	initKey := sha256.Sum256([]byte(passwd + timeCookie))
	nonce := sha512.Sum512([]byte(timeCookie + passwd))

	es, err := chacha20.NewXChaCha(initKey[:], nonce[:XNonceSize])
	if err != nil {
		log.Println("Error chacha20 init:", err)
		return
	}
	ds, err := chacha20.NewXChaCha(initKey[:], nonce[:XNonceSize])
	if err != nil {
		log.Println("Error chacha20 init:", err)
		return
	}

	pconn, err := net.Dial("tcp", server+":"+strconv.Itoa(sport))
	if err != nil {
		log.Println("Create connection failed :", err)
		return
	}
	cconn := cipherConn.NewCipherConn(ds, es, pconn)
	defer cconn.Close()

	randomDataLen, _ := tool.ReadInt(initKey[len(initKey)-2:])
	if randomDataLen < 32767 {
		randomDataLen = randomDataLen + 2984
	}

	randomData := make([]byte, randomDataLen+poly1305.TagSize)
	randbytes.Read(randomData)

	// authenticate the random padding with a Poly1305 tag keyed by initKey
	var mac [poly1305.TagSize]byte
	poly1305.Sum(&mac, randomData[:randomDataLen], &initKey)
	copy(randomData[randomDataLen:], mac[:])

	// Start proxying
	finish := make(chan bool, 4)

	// write random data head
	_, err = cconn.Write(randomData)
	if err != nil {
		log.Println("Connection write failed :", err)
		return
	}

	go proxy(cconn, conn, finish)
	go proxy(conn, cconn, finish)

	// Wait
	<-finish

	time.Sleep(2 * time.Second)
}
Example #2
func TestCrypto(t *testing.T) {
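	// round-trip check: encrypt the password with one stream, then decrypt the first 4 bytes with an identically keyed stream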
	passwd := "hello world"
	initKey := sha256.Sum256([]byte(passwd))
	nonce := sha512.Sum512_224([]byte(passwd))
	cs, err := chacha20.NewXChaCha(initKey[:], nonce[:chacha20.XNonceSize])
	if err != nil {
		t.Fatalf("Bad init: %v", err)
	}
	ds, err := chacha20.NewXChaCha(initKey[:], nonce[:chacha20.XNonceSize])
	if err != nil {
		t.Fatalf("Bad init: %v", err)
	}
	dst := make([]byte, len(passwd))
	cs.XORKeyStream(dst, []byte(passwd))
	den := make([]byte, 4)
	ds.XORKeyStream(den, dst[:4])

	if !bytes.Equal([]byte("hell"), den) {
		t.Errorf("Bad Decoder")
	}
}
Example #3
func handleRequest(conn net.Conn) {
	defer conn.Close()

	timeCookie := tool.GetTimeCookie()
	initKey := sha256.Sum256([]byte(passwd + timeCookie))
	nonce := sha512.Sum512([]byte(timeCookie + passwd))

	es, err := chacha20.NewXChaCha(initKey[:], nonce[:XNonceSize])
	if err != nil {
		log.Println("Error chacha20 init:", err)
		return
	}
	ds, err := chacha20.NewXChaCha(initKey[:], nonce[:XNonceSize])
	if err != nil {
		log.Println("Error chacha20 init:", err)
		return
	}

	//random data head length
	randomDataLen, _ := tool.ReadInt(initKey[len(initKey)-2:])
	if randomDataLen < 32767 {
		randomDataLen = randomDataLen + 2984
	}

	proxy(conn, es, ds, randomDataLen, &initKey)
}
Example #4
func TestXChaCha20(t *testing.T) {
	key := []byte{
		0x1b, 0x27, 0x55, 0x64, 0x73, 0xe9, 0x85, 0xd4,
		0x62, 0xcd, 0x51, 0x19, 0x7a, 0x9a, 0x46, 0xc7,
		0x60, 0x09, 0x54, 0x9e, 0xac, 0x64, 0x74, 0xf2,
		0x06, 0xc4, 0xee, 0x08, 0x44, 0xf6, 0x83, 0x89,
	}
	nonce := []byte{
		0x69, 0x69, 0x6e, 0xe9, 0x55, 0xb6, 0x2b, 0x73,
		0xcd, 0x62, 0xbd, 0xa8, 0x75, 0xfc, 0x73, 0xd6,
		0x82, 0x19, 0xe0, 0x03, 0x6b, 0x7a, 0x0b, 0x37,
	}
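	// expected keystream: XORing an all-zero buffer yields the raw keystream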
	expectedKeyStream := []byte{
		0x4f, 0xeb, 0xf2, 0xfe, 0x4b, 0x35, 0x9c, 0x50,
		0x8d, 0xc5, 0xe8, 0xb5, 0x98, 0x0c, 0x88, 0xe3,
		0x89, 0x46, 0xd8, 0xf1, 0x8f, 0x31, 0x34, 0x65,
		0xc8, 0x62, 0xa0, 0x87, 0x82, 0x64, 0x82, 0x48,
		0x01, 0x8d, 0xac, 0xdc, 0xb9, 0x04, 0x17, 0x88,
		0x53, 0xa4, 0x6d, 0xca, 0x3a, 0x0e, 0xaa, 0xee,
		0x74, 0x7c, 0xba, 0x97, 0x43, 0x4e, 0xaf, 0xfa,
		0xd5, 0x8f, 0xea, 0x82, 0x22, 0x04, 0x7e, 0x0d,
		0xe6, 0xc3, 0xa6, 0x77, 0x51, 0x06, 0xe0, 0x33,
		0x1a, 0xd7, 0x14, 0xd2, 0xf2, 0x7a, 0x55, 0x64,
		0x13, 0x40, 0xa1, 0xf1, 0xdd, 0x9f, 0x94, 0x53,
		0x2e, 0x68, 0xcb, 0x24, 0x1c, 0xbd, 0xd1, 0x50,
		0x97, 0x0d, 0x14, 0xe0, 0x5c, 0x5b, 0x17, 0x31,
		0x93, 0xfb, 0x14, 0xf5, 0x1c, 0x41, 0xf3, 0x93,
		0x83, 0x5b, 0xf7, 0xf4, 0x16, 0xa7, 0xe0, 0xbb,
		0xa8, 0x1f, 0xfb, 0x8b, 0x13, 0xaf, 0x0e, 0x21,
		0x69, 0x1d, 0x7e, 0xce, 0xc9, 0x3b, 0x75, 0xe6,
		0xe4, 0x18, 0x3a,
	}

	c, err := chacha20.NewXChaCha(key, nonce)
	if err != nil {
		t.Fatal(err)
	}
	buf := make([]byte, len(expectedKeyStream))
	c.XORKeyStream(buf, buf)
	if !bytes.Equal(expectedKeyStream, buf) {
		t.Errorf("Bad keystream: expected %x, was %x", expectedKeyStream, buf)
	}
}
Example #5
func progd_forword(ar cmdoptS) {
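	// forward pass: tar the input dir, compress with snappy, encrypt with XChaCha20, split into
	// size-limited df%X files, and record the nonce, Poly1305 tag and data-file count in a bolt metadata DB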

	//create metadata bolt db

	dbi, err := bolt.Open(ar.out_dir+"/md", 0600, nil)

	if err != nil {
		fmt.Println(err.Error())
		os.Exit(-1)
	}
	tx, err := dbi.Begin(true)
	if err != nil {
		fmt.Println(err.Error())
		os.Exit(-1)
	}
	defer tx.Rollback()
	db, err := tx.CreateBucket([]byte("Ketv1"))

	if err != nil {
		fmt.Println(err.Error())
		os.Exit(-1)
	}

	//generate crypto nonce

	nonce, _ := GenerateRandomBytes(24)

	//store it

	err = db.Put([]byte("nonce"), nonce)
	if err != nil {
		fmt.Println(err.Error())
		os.Exit(-1)
	}

	//calc key

	keyhasher := sha3.NewShake256()

	keyhasher.Write(nonce)
	keyhasher.Write([]byte(ar.secret_key))

	xchachakey := make([]byte, 32)
	keyhasher.Read(xchachakey)

	poly1305key := make([]byte, 32)
	keyhasher.Read(poly1305key)

	//init stream

	var LimitedSizeWriteToFilei LimitedSizeWriteToFile
	LimitedSizeWriteToFilei.InitNow()
	LimitedSizeWriteToFilei.TargetPatten = ar.out_dir + "/df%X"
	if !ar.div_unitk {
		LimitedSizeWriteToFilei.BytesPerFile = int64(ar.div_at) * const_Mbyte
	} else {
		LimitedSizeWriteToFilei.BytesPerFile = int64(ar.div_at) * const_Kbyte
	}

	cryptos, err := chacha20.NewXChaCha(xchachakey, nonce)
	if err != nil {
		fmt.Println(err.Error())
		os.Exit(-1)
	}

	HashWriter := sha3.NewShake256()

	CyDWriter := io.MultiWriter(LimitedSizeWriteToFilei, HashWriter)

	Data_writer := NewEncryptedWriter(cryptos, CyDWriter)

	CompressedStream := snappy.NewWriter(Data_writer)

	TarStream := tar.NewWriter(CompressedStream)
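	// write path: tar -> snappy -> XChaCha20 encrypt -> size-limited df%X files, with SHAKE256 hashing the ciphertext as it is written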

	GenFileList(ar.in_dir)

	for id := range rfi {
		filedes, err := os.Open(ar.in_dir + "/" + rfi[id])
		if err != nil {
			fmt.Println("Failed to open file " + rfi[id] + ":" + err.Error())
			continue
		}
		filein, _ := filedes.Stat()
		hdr := &tar.Header{
			Name: rfi[id],
			Mode: 0600,
			Size: filein.Size(),
		}

		if err := TarStream.WriteHeader(hdr); err != nil {
			log.Fatalln(err)
		}

		_, err = io.Copy(TarStream, filedes)

		if err != nil {
			fmt.Println("Failed to Write file " + rfi[id] + ":" + err.Error())
		}

		filedes.Close()

	}

	if err := TarStream.Close(); err != nil {
		log.Fatalln(err)
	}

	_, _, nd := LimitedSizeWriteToFilei.Finialize()

	FileHash := make([]byte, 64)
	HashWriter.Read(FileHash)

	var poly1305sum [16]byte
	var poly1305sum_key [32]byte

	copy(poly1305sum_key[:], poly1305key)

	poly1305.Sum(&poly1305sum, FileHash, &poly1305sum_key)

	err = db.Put([]byte("poly1305sum"), poly1305sum[:])
	if err != nil {
		fmt.Println(err.Error())
		os.Exit(-1)
	}

	bb := new(bytes.Buffer)
	binary.Write(bb, binary.LittleEndian, nd)

	err = db.Put([]byte("packagesum"), bb.Bytes())
	if err != nil {
		fmt.Println(err.Error())
		os.Exit(-1)
	}

	// metadata is complete; commit the transaction and close the DB
	if err := tx.Commit(); err != nil {
		fmt.Println(err.Error())
		os.Exit(-1)
	}
	dbi.Close()

	//finally we call par2 to compute reconstruction data
	if ar.parrate != 0 {
		_, err := exec.LookPath("par2")
		if err != nil {
			fmt.Println("Unable to whereis par2, reconstruction data compute was ignored:" + err.Error())
		}

		DirIf, _ := os.Open(ar.out_dir)
		DirIfs, _ := DirIf.Readdirnames(-1)

		cmdargs := []string{"c", "-a", "mdpp", "-r" + strconv.Itoa(ar.parrate), "-v", "--"}
		cmdargs = append(cmdargs, DirIfs...)

		cmd := exec.Command("par2", cmdargs...)
		cmd.Stdout = os.Stdout
		Absp, _ := filepath.Abs(ar.out_dir)
		cmd.Dir = Absp
		err = cmd.Start()
		if err != nil {
			fmt.Println("Unable to exec par2, reconstruction data compute was ignored:" + err.Error())
		}
		err = cmd.Wait()
		if err != nil {
			fmt.Println("par2 was finished unsuccessfully, reconstruction data compute was ignored(or failed):" + err.Error())
		}
	}

	fmt.Printf("Hash: %x\n", FileHash)
	fmt.Printf("Key: %s\n", ar.secret_key)

}
Example #6
func progd_reverse(ar cmdoptS) {
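	// reverse pass: optionally repair files with par2, load metadata from the bolt DB, then decrypt,
	// decompress and untar the data files, verifying a Poly1305 tag over the stream hash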

	if ar.parrate != 0 { //we do not care about the actual value
		_, err := exec.LookPath("par2")
		if err != nil {
			fmt.Println("Unable to locate par2, skipping metadata reconstruction: " + err.Error())
		}

		cmd := exec.Command("par2", "r", "mdpp.par2", "-v", "--", "md")
		cmd.Stdout = os.Stdout
		Absp, _ := filepath.Abs(ar.in_dir)
		cmd.Dir = Absp
		err = cmd.Start()
		if err != nil {
			fmt.Println("Unable to exec par2, metadata reconstruction data compute was ignored:" + err.Error())
		}
		err = cmd.Wait()
		if err != nil {
			fmt.Println("par2 was finished unsuccessfully, metadata reconstruction data compute was ignored(or failed):" + err.Error())
		}
	}

	//Open metadata bolt db
	dbi, err := bolt.Open(ar.in_dir+"/md", 0600, nil)

	if err != nil {
		fmt.Println(err.Error())
		os.Exit(-1)
	}
	tx, err := dbi.Begin(false)
	if err != nil {
		fmt.Println(err.Error())
		os.Exit(-1)
	}
	defer tx.Rollback()
	db := tx.Bucket([]byte("Ketv1"))

	if db == nil {
		fmt.Println("metadata bucket Ketv1 not found")
		os.Exit(-1)
	}

	ndb := db.Get([]byte("packagesum"))

	if ndb == nil {
		fmt.Println("packagesum not found in metadata")
		os.Exit(-1)
	}

	var nd int64

	missing_file := make([]string, 0, 25)
	all_file := make([]string, 0, 25)

	// decode the number of data files recorded by the forward pass
	binary.Read(bytes.NewBuffer(ndb), binary.LittleEndian, &nd)
	var cfn int64
	for cfn <= nd {
		cnnn := fmt.Sprintf(ar.in_dir+"/df%X", cfn)
		all_file = append(all_file, fmt.Sprintf("df%X", cfn))

		if _, err := os.Stat(cnnn); err != nil {
			if ar.parrate == 0 {
				missing_file = append(missing_file, fmt.Sprintf("df%X", cfn))
			} else {

				//touch the missing file so that par2 will try to recover this
				cfnd, err := os.Create(cnnn)

				if err != nil {
					fmt.Println(err.Error())
					os.Exit(-1)
				}

				cfnd.Close()

				missing_file = append(missing_file, fmt.Sprintf("df%X", cfn))

			}
		}
		cfn++
	}

	if len(missing_file) != 0 {
		if ar.parrate == 0 {
			fmt.Println("%d file missing", len(missing_file))

			for _, cf := range missing_file {
				fmt.Println(cf)
			}

			fmt.Println("Failed to reverse operate as there is file missing.")
			os.Exit(-1)

		} else {
			fmt.Println("%d file missing, but reconstruction by par2 underway.")

			for _, cf := range missing_file {
				fmt.Println(cf)
			}
		}
	}

	data_reconstruction_unsuccessful := true

	if ar.parrate != 0 { //we do not care about the actual value
		_, err := exec.LookPath("par2")
		if err != nil {
			fmt.Println("Unable to locate par2, skipping data reconstruction: " + err.Error())
		}

		cmdargs := []string{"r", "mdpp.par2", "-v", "--"}

		cmdargs = append(cmdargs, all_file...)

		cmd := exec.Command("par2", cmdargs...)
		cmd.Stdout = os.Stdout
		Absp, _ := filepath.Abs(ar.in_dir)
		cmd.Dir = Absp
		err = cmd.Start()
		if err != nil {
			fmt.Println("Unable to exec par2, metadata reconstruction was ignored:" + err.Error())
		}
		err = cmd.Wait()
		if err != nil {
			fmt.Println("par2 was finished unsuccessfully, data reconstruction was ignored(or failed):" + err.Error())
		} else {
			data_reconstruction_unsuccessful = false
		}
	}

	if ar.parrate != 0 && data_reconstruction_unsuccessful {
		fmt.Println("operation failed: unable to reconstruct.")
		fmt.Println("If data were correct, remove parrate might do.")

		for cf := range missing_file {
			os.Remove(fmt.Sprint("%s/%s", ar.in_dir, cf))
		}

		os.Exit(-1)
	}

	//now we do the actual job

	nonce := db.Get([]byte("nonce"))
	if nonce == nil {
		fmt.Println("nonce not found in metadata")
		os.Exit(-1)
	}

	//calc key

	keyhasher := sha3.NewShake256()

	keyhasher.Write(nonce)
	keyhasher.Write([]byte(ar.secret_key))

	xchachakey := make([]byte, 32)
	keyhasher.Read(xchachakey)

	poly1305key := make([]byte, 32)
	keyhasher.Read(poly1305key)

	//set up stream

	var LimitedSizeReadFromi LimitedSizeReadFrom

	LimitedSizeReadFromi.InitNow()

	LimitedSizeReadFromi.TargetPatten = ar.in_dir + "/df%X"

	cryptos, err := chacha20.NewXChaCha(xchachakey, nonce)
	if err != nil {
		fmt.Println(err.Error())
		os.Exit(-1)
	}

	HashWriter := sha3.NewShake256()

	Tread := io.TeeReader(LimitedSizeReadFromi, HashWriter)

	DataReader := NewDecryptedReader(Tread, cryptos)

	DeCompressedStream := snappy.NewReader(DataReader)

	TarStream := tar.NewReader(DeCompressedStream)
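	// read path: size-limited df%X files -> SHAKE256 tee -> XChaCha20 decrypt -> snappy -> tar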

	for {
		hdr, err := TarStream.Next()
		if err == io.EOF {
			// end of tar archive
			break
		}
		if err != nil {
			log.Fatalln(err)
		}
		filenamex := hdr.Name
		if !IsPathAllowed(hdr.Name) {
			filenamex = url.QueryEscape(hdr.Name)
		}

		dirc := filepath.Dir(ar.out_dir + "/" + filenamex)
		os.MkdirAll(dirc, 0700)

		cfhd, err := os.Create(ar.out_dir + "/" + filenamex)

		if err != nil {
			log.Fatalln(err)
		}

		_, err = io.Copy(cfhd, TarStream)

		if err != nil {
			log.Fatalln(err)
		}

		cfhd.Close()

	}

	LimitedSizeReadFromi.Finialize()

	FileHash := make([]byte, 64)
	HashWriter.Read(FileHash)
	fmt.Printf("Hash: %x\n", FileHash)

	var poly1305sum [16]byte
	var poly1305sum_key [32]byte
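	// Poly1305 over the SHAKE256 stream hash, keyed with poly1305key, must match the stored tag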
	poly1305sums := db.Get([]byte("poly1305sum"))

	copy(poly1305sum[:], poly1305sums)
	copy(poly1305sum_key[:], poly1305key)

	iscorrect := poly1305.Verify(&poly1305sum, FileHash, &poly1305sum_key)

	dbi.Close()

	if iscorrect {
		fmt.Println("File data is correct")
		os.Exit(0)
	} else {
		fmt.Println("File data is't match!")
		os.Exit(-2)
	}

}