Super A^B mod C

This post describes how to handle modular exponentiation with a huge exponent: fast (binary) exponentiation combined with Euler's totient function is used to reduce the exponent, which solves problems of the form A^B mod C where B may be extremely large. Detailed implementation steps and a C++ code example are given.


E - Super A^B mod C

 

Given A, B, C, you should quickly calculate the result of A^B mod C. (1<=A,C<=1000000000, 1<=B<=10^1000000).

 

Input

There are multiple test cases. Each test case consists of one line containing three integers A, B and C, separated by single spaces.

Output

For each test case, output one integer denoting the result of A^B mod C.

Sample Input

3 2 4
2 10 1000

Sample Output

1
24

 

 

The task itself is simple: compute A^B mod C. The catch is that B is far too large to fit in any integer type, so the exponent has to be reduced first. The extended Euler theorem gives exactly the formula we need:

    A^B ≡ A^(B mod φ(C) + φ(C))  (mod C),   provided B ≥ φ(C),

where φ is Euler's totient function. If B < φ(C), the exponent already fits in a 64-bit integer and fast exponentiation can be applied directly.
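
As a quick illustration (a made-up case, not one of the samples): for 2^1000 mod 1000 we have φ(1000) = 1000 · (1/2) · (4/5) = 400 and 1000 ≥ 400, so the exponent becomes 1000 mod 400 + 400 = 600; thus 2^1000 ≡ 2^600 (mod 1000), which fast exponentiation handles directly.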

The code is as follows:

 

#include<cstdio>
#include<cstring>
#include<iostream>
#include<algorithm>
using namespace std;
typedef long long ll;
const ll N=1001000;
ll p[N+1];          // sieve table: p[i]==1 means i is composite
char s[N];          // B is read as a decimal string (up to 10^1000000)
ll A,B,C;
ll mypow(ll m,ll n){            // fast (binary) exponentiation: m^n mod C
	ll ans=1;
	ll b=m%C;
	while(n){
		if(n&1)
			ans=ans*b%C;
		b=b*b%C;
		n>>=1;
	}
	return ans;
}
void prime(){                   // sieve of Eratosthenes up to N
	memset(p,0,sizeof(p));
	p[1]=1;
	for(ll i=2;i*i<=N;i++)
		for(ll j=2;i*j<=N;j++)
			p[i*j]=1;
}
ll ola(ll n){                   // Euler's totient phi(n) by trial division
	ll r,aa;
	r=aa=n;
	for(ll i=2;i*i<=n;i++){
		if(!p[i]&&aa%i==0){     // i is a prime factor of n
			r=r/i*(i-1);
			while(aa%i==0)
				aa/=i;
		}
	}
	if(aa>1)                    // one prime factor larger than sqrt(n) is left
		r=r/aa*(aa-1);
	return r;
}
int main()
{
	prime();
	while(~scanf("%lld%s%lld",&A,s,&C)){
		ll l=strlen(s);
		ll oc=ola(C);
		ll i,ans;
		B=0;
		for(i=0;i<l;i++){       // does B exceed phi(C)?
			B=B*10+s[i]-'0';
			if(B>oc) break;
		}
		if(i==l)                // B <= phi(C): use the exponent as is
			ans=mypow(A,B);
		else{                   // B > phi(C): reduce the exponent
			B=0;
			for(i=0;i<l;i++)
				B=(B*10+s[i]-'0')%oc;
			ans=mypow(A,B+oc);  // A^B = A^(B mod phi(C) + phi(C)) (mod C)
		}
		cout<<ans<<endl;
	}
	return 0;
}
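
As a sanity check on the exponent reduction, here is a minimal brute-force comparison for small inputs (my own sketch, not part of the original solution; slowpow, phi and fastpow are ad-hoc helper names introduced only for this test):

#include<cstdio>
typedef long long ll;

// Naive O(B) computation of A^B mod C -- only feasible for small B.
ll slowpow(ll a,ll b,ll c){
	ll r=1%c;
	for(ll i=0;i<b;i++) r=r*(a%c)%c;
	return r;
}

// Euler's totient by plain trial division (no sieve needed at this size).
ll phi(ll n){
	ll r=n;
	for(ll i=2;i*i<=n;i++){
		if(n%i==0){
			r=r/i*(i-1);
			while(n%i==0) n/=i;
		}
	}
	if(n>1) r=r/n*(n-1);
	return r;
}

// Fast power with the modulus passed explicitly.
ll fastpow(ll a,ll b,ll c){
	ll r=1%c,x=a%c;
	while(b){
		if(b&1) r=r*x%c;
		x=x*x%c;
		b>>=1;
	}
	return r;
}

int main(){
	for(ll A=1;A<=30;A++)
		for(ll B=1;B<=200;B++)
			for(ll C=1;C<=50;C++){
				ll f=phi(C);
				ll e=(B>=f)?B%f+f:B;        // extended Euler reduction
				if(fastpow(A,e,C)!=slowpow(A,B,C))
					printf("mismatch: A=%lld B=%lld C=%lld\n",A,B,C);
			}
	printf("check finished\n");
	return 0;
}

If the reduction is wired up correctly, this should print only "check finished".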

 

 
