如何将解密的UInt8转换为String?

时间:2015-12-29 18:38:06

标签: swift encryption cryptography

我正在使用CryptoSwift来加密数据。我正在学习如何使用它,但是我连第一个基础教程都无法完成:我无法将解密后的数据转换回字符串——如果无法把数据解密回可读的字符串,那么加密就失去了意义。代码:

// Round-trip demo: encrypt a UTF-8 string with AES-CBC (CryptoSwift),
// decrypt it, and convert the recovered bytes back into a String.
let string = "Hi. This is Atlas"

// The plaintext as raw UTF-8 bytes (CryptoSwift works on [UInt8]).
let input: [UInt8] = Array(string.utf8)

print(input)

// NOTE(review): an all-zero key is fine for a tutorial, but never use one in production.
let key: [UInt8] = [0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00]

// Random IV, one AES block (16 bytes) long.
let iv: [UInt8] = AES.randomIV(AES.blockSize)

do {
    let encryptedBytes: [UInt8] = try AES(key: key, iv: iv, blockMode: .CBC).encrypt(input, padding: PKCS7())

    print(encryptedBytes)

    let decrypted: [UInt8] = try AES(key: key, iv: iv, blockMode: .CBC).decrypt(encryptedBytes, padding: PKCS7())

    print(decrypted)

    // Convert the decrypted bytes back into a String via Foundation.
    let data = NSData(bytes: decrypted, length: decrypted.count)
    let roundTripped = String(data: data, encoding: NSUTF8StringEncoding)
    print(roundTripped) // Optional("Hi. This is Atlas")
} catch {
    // Fixed: the original had two consecutive `catch` clauses, which does not
    // compile, and both were empty, silently swallowing any crypto failure.
    print("Crypto operation failed: \(error)")
}

感谢您的帮助

2 个答案:

答案 0 :(得分:4)

你应该让 Foundation 来替你解码 UTF-8,因为无法直接构造 String.UTF8View。因此,先把字节数组转换为 NSData:

// Example input: the UTF-8 bytes of "Hello".
let decrypted: [UInt8] = [0x48, 0x65, 0x6c, 0x6c, 0x6f]

// Hand the bytes to Foundation as NSData and let it perform the UTF-8 decode;
// the result is optional because the bytes might not be valid UTF-8.
let str = String(data: NSData(bytes: decrypted, length: decrypted.count),
                 encoding: NSUTF8StringEncoding)

如果你想在不依赖 Foundation 框架的情况下完成这件事,也是可以的,只是要多做一点工作:你必须自己管理解码过程。

extension String {
    /// Failable initializer: decodes `utf8Bytes` as UTF-8 using only the
    /// Swift standard library (no Foundation). Returns nil if the bytes do
    /// not form a valid UTF-8 sequence.
    init?(utf8Bytes: [UInt8]) {
        var codec = UTF8()
        var byteIterator = utf8Bytes.generate()
        var decoded: [Character] = []
        // Pull Unicode scalars out of the byte stream until it is
        // exhausted or proves malformed.
        while true {
            switch codec.decode(&byteIterator) {
            case .Result(let scalar):
                decoded.append(Character(scalar))
            case .EmptyInput:
                // Every byte consumed successfully; build the String.
                self.init(decoded)
                return
            case .Error:
                // Malformed UTF-8 — signal failure to the caller.
                return nil
            }
        }
    }
}

let unicode = String(utf8Bytes: bytes)

(让我非常惊讶的是,Swift 标准库竟然没有内置这个功能,因为这种需求非常常见,而且用标准库的其他部分就能很容易地实现。正因为平时不需要自己手写这段逻辑,我的实现里也可能存在一些细微的问题。)

答案 1 :(得分:1)

' Shared 3-D pipeline state used by GL_LoadVertexes and LoadVertex3D.
'   Objects                         - growing array of vertex objects (resized via ReDim Preserve).
'   ObjectsIndex/FacesIndex/PointsIndex - cursors into Objects / Face / PointVertexes while vertices stream in.
'   GL_NPPF                         - number of points per face (4 => quad faces).
'   GL_Status / GL_Settings         - current load phase and option bit-flags.
'   GL_DrawingInitialized / GL_GraphicsInitialized - guard flags for the start/end protocol.
'   GL_Unit                         - scale applied when GL_UseDefaultUnit is set.
' NOTE(review): the index fields are declared As Double yet used as array
' subscripts; Integer would be conventional - confirm before changing.
Private Objects() As _3DDefinitions.VertexesObjects,
    ObjectsIndex As Double, FacesIndex As Double, PointsIndex As Double,
    GL_NPPF As Integer = 4, GL_COLOR As Color = Color.Brown,
    GL_Status As _3DDefinitions.GL_LoadAction = GL_LoadAction.GL_Start, GL_TranslatePosition As _3DDefinitions.DPoint,
    GL_Settings As _3DDefinitions.GL_EnableAction = GL_EnableAction.GL_UseDefaultUnit,
    GL_DrawingInitialized As Boolean = False, GL_GraphicsInitialized As Boolean = False,
    GL_Unit As Double = 300


''' <summary>
''' Marks the begin/end of loading one object's vertexes and keeps the
''' bookkeeping fields (Objects array and the per-object cursors) in sync.
''' </summary>
Public Sub GL_LoadVertexes(ByVal Operation As _3DDefinitions.GL_LoadAction)
    ' Record the requested phase so LoadVertex3D knows whether to accept points.
    GL_Status = Operation

    Select Case Operation
        Case _3DDefinitions.GL_LoadAction.GL_Start
            ' Begin a new object, unless a load is already in progress.
            If Not GL_DrawingInitialized Then
                GL_DrawingInitialized = True
                GL_GraphicsInitialized = False

                ' Grow the object list, preserving the objects loaded so far.
                ReDim Preserve Objects(ObjectsIndex)

                ' Reset the per-object cursors for the incoming vertices.
                FacesIndex = 0
                PointsIndex = 0
            End If

        Case GL_LoadAction.GL_End
            ' Close the current object only if one was started and actually
            ' received vertex data.
            If GL_GraphicsInitialized And GL_DrawingInitialized Then
                GL_DrawingInitialized = False
                ObjectsIndex = ObjectsIndex + 1

                Draw()
            End If
    End Select
End Sub

' Appends one 3-D vertex to the object currently being loaded.
' Only has an effect between GL_Start and GL_End (see GL_LoadVertexes).
Public Sub LoadVertex3D(ByVal X As Single, ByVal Y As Single, ByVal Z As Single)

    If GL_Status = GL_LoadAction.GL_Start Then
        ' At least one vertex has arrived for this object.
        GL_GraphicsInitialized = True

        ' NOTE(review): this ReDim Preserve on a nested array member is the
        ' reported error. If VertexesObjects / its Face elements are Structures
        ' (value types), the chained access may operate on a copy, and
        ' Face(FacesIndex) may still be uninitialized for a fresh face - confirm
        ' the _3DDefinitions declarations. The usual workaround is to copy the
        ' inner array to a temporary, resize it, then assign it back.
        ReDim Preserve Objects(ObjectsIndex).Face(FacesIndex).PointVertexes(PointsIndex)''<--Here is the error

        ' GL_UseOwnUnit: store the coordinates exactly as given.
        If FindBit(GL_Settings, GL_EnableAction.GL_UseOwnUnit) Then
            With Objects(ObjectsIndex).Face(FacesIndex).PointVertexes(PointsIndex)
                .X = X
                .Y = Y
                .Z = Z
            End With

        ' GL_UseDefaultUnit: scale the coordinates by GL_Unit / 10.
        ElseIf FindBit(GL_Settings, GL_EnableAction.GL_UseDefaultUnit) Then
            With Objects(ObjectsIndex).Face(FacesIndex).PointVertexes(PointsIndex)
                .X = X * GL_Unit / 10
                .Y = Y * GL_Unit / 10
                .Z = Z * GL_Unit / 10
            End With

        End If

        ' Advance the cursors: once GL_NPPF points fill a face, start the next one.
        If PointsIndex = GL_NPPF - 1 Then
            FacesIndex = FacesIndex + 1
            PointsIndex = 0
        Else

            PointsIndex = PointsIndex + 1

        End If

    End If
End Sub

let stringDecrypted = String(decrypted.map { Character(UnicodeScalar($0)) })

它会将每个 UInt8 映射为 UnicodeScalar,再映射为 Character。之后,它使用 String 的初始化器从 Character 数组创建 String。(注意:这种逐字节映射只对 ASCII 字节正确;对多字节 UTF-8 序列会得到乱码。)