Remove 'using std::error_code' from lib.
llvm-svn: 210871
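
With the file-scope using-declaration gone, every use in these files now
qualifies the name as std::error_code. A minimal standalone sketch of the
idiom (illustrative only, not part of this patch; the helper names are
made up):

  #include <system_error>

  // Hypothetical helper: a default-constructed std::error_code means
  // success, which is what the functions below return on the happy path.
  static std::error_code noError() {
    return std::error_code();
  }

  // An error_code converts to true only when it holds an actual error,
  // so callers can simply write: if (EC) { ... report EC.message() ... }
  static bool failed(std::error_code EC) {
    return static_cast<bool>(EC);
  }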
diff --git a/llvm/lib/Support/Windows/Memory.inc b/llvm/lib/Support/Windows/Memory.inc
index 431cfb6..ae8371a 100644
--- a/llvm/lib/Support/Windows/Memory.inc
+++ b/llvm/lib/Support/Windows/Memory.inc
@@ -19,7 +19,6 @@
// The Windows.h header must be the last one included.
#include "WindowsSupport.h"
-using std::error_code;
namespace {
@@ -71,8 +70,8 @@
MemoryBlock Memory::allocateMappedMemory(size_t NumBytes,
const MemoryBlock *const NearBlock,
unsigned Flags,
- error_code &EC) {
- EC = error_code();
+ std::error_code &EC) {
+ EC = std::error_code();
if (NumBytes == 0)
return MemoryBlock();
@@ -115,9 +114,9 @@
return Result;
}
-error_code Memory::releaseMappedMemory(MemoryBlock &M) {
+std::error_code Memory::releaseMappedMemory(MemoryBlock &M) {
if (M.Address == 0 || M.Size == 0)
- return error_code();
+ return std::error_code();
if (!VirtualFree(M.Address, 0, MEM_RELEASE))
return mapWindowsError(::GetLastError());
@@ -125,13 +124,13 @@
M.Address = 0;
M.Size = 0;
- return error_code();
+ return std::error_code();
}
-error_code Memory::protectMappedMemory(const MemoryBlock &M,
+std::error_code Memory::protectMappedMemory(const MemoryBlock &M,
unsigned Flags) {
if (M.Address == 0 || M.Size == 0)
- return error_code();
+ return std::error_code();
DWORD Protect = getWindowsProtectionFlags(Flags);
@@ -142,7 +141,7 @@
if (Flags & MF_EXEC)
Memory::InvalidateInstructionCache(M.Address, M.Size);
- return error_code();
+ return std::error_code();
}
/// InvalidateInstructionCache - Before the JIT can run a block of code
@@ -158,18 +157,18 @@
const MemoryBlock *NearBlock,
std::string *ErrMsg) {
MemoryBlock MB;
- error_code EC;
+ std::error_code EC;
MB = allocateMappedMemory(NumBytes, NearBlock,
MF_READ|MF_WRITE|MF_EXEC, EC);
- if (EC != error_code() && ErrMsg) {
+ if (EC != std::error_code() && ErrMsg) {
MakeErrMsg(ErrMsg, EC.message());
}
return MB;
}
bool Memory::ReleaseRWX(MemoryBlock &M, std::string *ErrMsg) {
- error_code EC = releaseMappedMemory(M);
- if (EC == error_code())
+ std::error_code EC = releaseMappedMemory(M);
+ if (EC == std::error_code())
return false;
MakeErrMsg(ErrMsg, EC.message());
return true;
diff --git a/llvm/lib/Support/Windows/Process.inc b/llvm/lib/Support/Windows/Process.inc
index c88557c..81aee0e 100644
--- a/llvm/lib/Support/Windows/Process.inc
+++ b/llvm/lib/Support/Windows/Process.inc
@@ -48,7 +48,6 @@
using namespace llvm;
using namespace sys;
-using std::error_code;
process::id_type self_process::get_id() {
return GetCurrentProcessId();
@@ -180,16 +179,16 @@
return std::string(Res.data());
}
-static error_code windows_error(DWORD E) {
+static std::error_code windows_error(DWORD E) {
return mapWindowsError(E);
}
-error_code
+std::error_code
Process::GetArgumentVector(SmallVectorImpl<const char *> &Args,
ArrayRef<const char *>,
SpecificBumpPtrAllocator<char> &ArgAllocator) {
int NewArgCount;
- error_code ec;
+ std::error_code ec;
wchar_t **UnicodeCommandLine = CommandLineToArgvW(GetCommandLineW(),
&NewArgCount);
@@ -214,7 +213,7 @@
if (ec)
return ec;
- return error_code();
+ return std::error_code();
}
bool Process::StandardInIsUserInput() {
diff --git a/llvm/lib/Support/Windows/Program.inc b/llvm/lib/Support/Windows/Program.inc
index 8d1df5f..b2f71ae 100644
--- a/llvm/lib/Support/Windows/Program.inc
+++ b/llvm/lib/Support/Windows/Program.inc
@@ -24,7 +24,6 @@
//===----------------------------------------------------------------------===//
namespace llvm {
-using std::error_code;
using namespace sys;
ProcessInfo::ProcessInfo() : ProcessHandle(0), Pid(0), ReturnCode(0) {}
@@ -227,7 +226,7 @@
// an environment block by concatenating them.
for (unsigned i = 0; envp[i]; ++i) {
SmallVector<wchar_t, MAX_PATH> EnvString;
- if (error_code ec = windows::UTF8ToUTF16(envp[i], EnvString)) {
+ if (std::error_code ec = windows::UTF8ToUTF16(envp[i], EnvString)) {
SetLastError(ec.value());
MakeErrMsg(ErrMsg, "Unable to convert environment variable to UTF-16");
return false;
@@ -291,7 +290,7 @@
fflush(stderr);
SmallVector<wchar_t, MAX_PATH> ProgramUtf16;
- if (error_code ec = windows::UTF8ToUTF16(Program, ProgramUtf16)) {
+ if (std::error_code ec = windows::UTF8ToUTF16(Program, ProgramUtf16)) {
SetLastError(ec.value());
MakeErrMsg(ErrMsg,
std::string("Unable to convert application name to UTF-16"));
@@ -299,7 +298,7 @@
}
SmallVector<wchar_t, MAX_PATH> CommandUtf16;
- if (error_code ec = windows::UTF8ToUTF16(command.get(), CommandUtf16)) {
+ if (std::error_code ec = windows::UTF8ToUTF16(command.get(), CommandUtf16)) {
SetLastError(ec.value());
MakeErrMsg(ErrMsg,
std::string("Unable to convert command-line to UTF-16"));
@@ -423,18 +422,18 @@
return WaitResult;
}
-error_code sys::ChangeStdinToBinary(){
+std::error_code sys::ChangeStdinToBinary(){
int result = _setmode( _fileno(stdin), _O_BINARY );
if (result == -1)
- return error_code(errno, std::generic_category());
- return error_code();
+ return std::error_code(errno, std::generic_category());
+ return std::error_code();
}
-error_code sys::ChangeStdoutToBinary(){
+std::error_code sys::ChangeStdoutToBinary(){
int result = _setmode( _fileno(stdout), _O_BINARY );
if (result == -1)
- return error_code(errno, std::generic_category());
- return error_code();
+ return std::error_code(errno, std::generic_category());
+ return std::error_code();
}
bool llvm::sys::argumentsFitWithinSystemLimits(ArrayRef<const char*> Args) {