How to SELECT DISTINCT with a different ORDER BY in PostgreSQL

Date: 2017-05-01 18:42:53

Tags: postgresql

Hi, I want to query a table and SELECT DISTINCT rows using a different ORDER BY; I want the ordering to be descending (DESC). How can I do that?

CREATE TABLE test_dupl2(id SERIAL, letter_one TEXT, number_int INT, primary key (id));

INSERT INTO test_dupl2(letter_one,number_int) VALUES ('A',1), ('A',2), ('B',1), ('A', 9), ('B', 4);

My query:

select letter_one, number_int from
    (SELECT DISTINCT ON (letter_one) letter_one, number_int FROM test_dupl2) as foo
order by foo.number_int desc;

Wrong output:

('A', 1), ('B', 1)

The output I want:

('A', 9), ('B', 4)

PostgreSQL 9.4
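For reference, DISTINCT ON keeps the first row of each group as determined by the query's own ORDER BY, so the descending sort on number_int has to appear in the same query, right after the DISTINCT ON expression. A minimal sketch against the table above (an outer query can re-sort the final result if a different overall order is needed):

SELECT DISTINCT ON (letter_one) letter_one, number_int
FROM test_dupl2
ORDER BY letter_one, number_int DESC;

With the sample data this keeps the row with the highest number_int per letter, i.e. ('A', 9) and ('B', 4).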

1 answer:

Answer 0 (score: 1)

import os
import re
import sys
from glob import glob

import numpy as np
import tensorflow as tf
# more imports


def extract_number(f):  # used to find the latest checkpoint file
    s = re.findall(r"epoch(\d+)\.ckpt", f)
    return (int(s[0]) if s else -1, f)


def restore(init_op, sess, saver):  # restore the latest checkpoint, or initialise from scratch
    ckpt_files = glob(os.path.join("./params/e*"))

    if ckpt_files:
        # pick the file with the highest epoch number and strip its
        # extension (".meta", ".index", ...) to get the checkpoint prefix
        latest = max(ckpt_files, key=extract_number)
        saver.restore(sess, os.path.splitext(latest)[0])
    else:
        # no checkpoint found, start from freshly initialised variables
        sess.run(init_op)


with tf.Graph().as_default() as g:

    # build models

    total_batch = data.train.num_examples / batch_size
    epochLimit = 51

    saver = tf.train.Saver()
    init_op = tf.global_variables_initializer()

    with tf.Session() as sess:

        # restore the latest checkpoint if one exists, otherwise initialise the variables
        restore(init_op, sess, saver)

        # resume the epoch counter from the persisted global_step
        epoch = global_step.eval()


        while epoch < epochLimit:

            total_batch = data.train.num_examples / batch_size

            for i in range(int(total_batch)):
                sys.stdout.flush()

                # fetch a batch of real voxel data and sample latent vectors
                voxels = newData.eval()
                batch_z = np.random.uniform(-1, 1, [batch_size, z_size]).astype(np.float32)

                # one optimisation step each for the generator and the discriminator
                sess.run(opt_G, feed_dict={z: batch_z, train: True})
                sess.run(opt_D, feed_dict={input: voxels, z: batch_z, train: True})

                # evaluate the current losses, append them to the CSV log and print them
                with open("out/loss.csv", 'a') as f:
                    batch_loss_G = sess.run(loss_G, feed_dict={z: batch_z, train: False})
                    batch_loss_D = sess.run(loss_D, feed_dict={input: voxels, z: batch_z, train: False})
                    msgOut = "Epoch: [{0}], i: [{1}], G_Loss[{2:.8f}], D_Loss[{3:.8f}]".format(epoch, i, batch_loss_G, batch_loss_D)
                    f.write("{0},{1},{2},{3}\n".format(epoch, i, batch_loss_G, batch_loss_D))
                    print(msgOut)

            # advance the epoch counter, persist it in global_step and checkpoint the model
            epoch = epoch + 1
            sess.run(global_step.assign(epoch))
            saver.save(sess, "params/epoch{0}.ckpt".format(epoch))

            # generate a sample from the generator and save it as a binvox file
            batch_z = np.random.uniform(-1, 1, [batch_size, z_size]).astype(np.float32)
            voxels = sess.run(x_, feed_dict={z: batch_z})
            v = voxels[0].reshape([32, 32, 32]) > 0
            util.save_binvox(v, "out/epoch{0}.vox".format(epoch), 32)