Problem F

This post walks through a coin-change programming problem: finding the minimum and maximum number of banknotes needed to pay a given amount exactly. It explains the solution idea and provides a C++ implementation.


Problem Description
"Yakexi, this is the best age!" Dong MW works hard and get high pay, he has many 1 Jiao and 5 Jiao banknotes(纸币), some day he went to a bank and changes part of his money into 1 Yuan, 5 Yuan, 10 Yuan.(1 Yuan = 10 Jiao)
"Thanks to the best age, I can buy many things!" Now Dong MW has a book to buy, it costs P Jiao. He wonders how many banknotes at least,and how many banknotes at most he can use to buy this nice book. Dong MW is a bit strange, he doesn't like to get the change, that is, he will give the bookseller exactly P Jiao.

Input
T (T <= 100) in the first line, indicating the number of cases, followed by T lines with 6 integers each: P a1 a5 a10 a50 a100, where ai is the number of i-Jiao banknotes. All integers are smaller than 1000000.

Output
Two integers A and B for each case: A is the fewest number of banknotes that buys the book exactly, and B is the largest. If Dong MW can't pay exactly P Jiao, output "-1 -1".

Sample Input
3
33 6 6 6 6 6
10 10 10 10 10 10
11 0 1 20 20 20

Sample Output
6 9
1 10
-1 -1
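
To see where these answers come from: in the first case (P = 33 Jiao, six notes of every value), the fewest notes are three 10-Jiao plus three 1-Jiao (6 notes), and the most are six 5-Jiao plus three 1-Jiao (9 notes). In the second case a single 10-Jiao note is the minimum and ten 1-Jiao notes the maximum. In the third case every available denomination is a multiple of 5, so paying 11 Jiao would require a 1-Jiao note, and there are none, hence -1 -1.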
Problem restated: there are banknotes of value 1, 5, 10, 50, and 100 (Jiao), each with a given count; given an amount, find both the maximum and the minimum number of banknotes that pay it exactly.
Approach: the minimum is of course easy, a greedy from the largest denomination down. The maximum is trickier; after trying several methods, the idea that works is to exchange the large notes of the already-computed minimal solution for small ones wherever possible, and what remains after the exchange should be the maximum. The code below does this directly as a reverse greedy: scanning from large to small, it takes a large note only when the remaining amount p exceeds the combined value sum[i-1] of all smaller notes, and then takes just enough of them, t = ceil((p - sum[i-1]) / a_m[i]).
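
Since it is not obvious that this reverse greedy always yields the true maximum, a brute-force cross-check is handy. The sketch below is my own addition, not part of the original solution: a bounded-knapsack DP over the five denominations that, for small P, computes the exact minimum and maximum note counts. Comparing it against the mina/maxa functions in the code further down on random small cases is a quick sanity check.

// Bounded-knapsack cross-check (illustrative sketch, not from the original
// post): for every reachable amount up to P it tracks the fewest and the
// most banknotes paying it exactly. Only practical for small P.
#include <vector>
#include <utility>
#include <algorithm>
using namespace std;

pair<int,int> bruteForce(int P,const int cnt[6])//cnt[1..5]: 1,5,10,50,100 Jiao
{
    static const int val[6]={0,1,5,10,50,100};
    const int INF=1000000000;
    vector<int> lo(P+1,INF),hi(P+1,-INF);
    lo[0]=0; hi[0]=0;
    for(int i=1;i<=5;i++)
    {
        vector<int> nlo(lo),nhi(hi);//k=0 notes of this value carried over
        for(int p=0;p<=P;p++)
        {
            if(lo[p]==INF) continue;//amount p unreachable so far
            for(int k=1;k<=cnt[i]&&p+k*val[i]<=P;k++)
            {
                int q=p+k*val[i];
                nlo[q]=min(nlo[q],lo[p]+k);
                nhi[q]=max(nhi[q],hi[p]+k);
            }
        }
        lo.swap(nlo); hi.swap(nhi);
    }
    if(lo[P]==INF) return make_pair(-1,-1);
    return make_pair(lo[P],hi[P]);
}

On the first sample, bruteForce(33, cnt) with six notes of each value returns (6, 9), matching the expected output.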
Reflections: I've been solving problems slowly these past two days. Every problem needs some thinking first, and there are many things I haven't seen before, so I have to look them up. I need to pick up the pace.
Code:

#include <cstdio>
#include <cstring>
#include <algorithm>
using namespace std;
// Fewest banknotes: greedy from the largest denomination down,
// taking as many notes of each value as the remaining amount allows.
int mina(int a[],int p,int a_m[])
{
    int ans=0;
    for(int i=5;i>1;i--)
    {
        if(p>=a[i]*a_m[i])//remaining amount covers every note of this value
        {
            ans+=a[i];
            p-=a[i]*a_m[i];
        }
        else//take only as many notes as fit into the remaining amount
        {
            ans+=p/a_m[i];
            p%=a_m[i];
        }
    }
    if(p>a[1]) return -1;//not enough 1-Jiao notes for the remainder
    else return ans+p;
}
// Most banknotes: from large to small, take a big note only when the
// remaining amount cannot be covered by the total value of all smaller
// notes, and then take just enough of them.
int maxa(int a[],int p,int a_m[],int sum[])
{
    int ans=0;
    for(int i=5;i>1;i--)
    {
        if(p<=sum[i-1])//the smaller denominations can cover p by themselves
            continue;
        else
        {
            //sum[i-1] is the total value of all denominations below a_m[i],
            //so ceil((p-sum[i-1])/a_m[i]) notes of value a_m[i] are needed
            int t;
            t=((p-sum[i-1])/a_m[i])+(((p-sum[i-1])%a_m[i])?1:0);
            ans+=t;
            p-=t*a_m[i];
        }
    }
    if(p>a[1]) return -1;//not enough 1-Jiao notes for the remainder
    else return ans+p;
}
void solve(int a[],int p,int a_m[])
{
    int minn=0,maxn=0;
    int sum[6]={0};
    for(int i=1;i<6;i++)
        sum[i]=sum[i-1]+a[i]*a_m[i];//prefix sums of total value per denomination
    minn=mina(a,p,a_m);
    maxn=maxa(a,p,a_m,sum);
    if(minn==-1||maxn==-1)printf("-1 -1\n");
    else printf("%d %d\n",minn,maxn);
}
int main()
{
    //freopen("in.txt", "r", stdin);
    int a[6],a_m[6]={0,1,5,10,50,100},n,p;
    scanf("%d",&n);
    for(int i=0;i<n;i++)
    {
        memset(a,0,sizeof(a));
        scanf("%d%d%d%d%d%d",&p,&a[1],&a[2],&a[3],&a[4],&a[5]);
        int sum=0;//total value must be reset for every case
        for(int j=1;j<6;j++)
            sum+=a_m[j]*a[j];
        if(sum<p) printf("-1 -1\n");//even all notes together fall short of p
        else solve(a,p,a_m);
    }
    return 0;
}
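
For a concrete trace of maxa on the first sample: with p = 33, sum[1] = 6 and sum[2] = 36, the 100-, 50- and 10-Jiao rounds are all skipped because p never exceeds the total value of the smaller notes; at the 5-Jiao round t = ceil((33 - 6)/5) = 6 notes are taken, leaving p = 3 for the 1-Jiao notes and a total of 6 + 3 = 9, matching the sample output.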

Reposted from: https://www.cnblogs.com/wuwangchuxin0924/p/5781663.html
